From df98552b6f1e3f81c153703b9a47c0d7e62890d6 Mon Sep 17 00:00:00 2001 From: Bagatur <22008038+baskaryan@users.noreply.github.com> Date: Fri, 16 Aug 2024 11:18:54 -0700 Subject: [PATCH 01/80] core[patch]: Release 0.2.33 (#25498) --- libs/core/pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libs/core/pyproject.toml b/libs/core/pyproject.toml index 9c2f556fc4166..b2e19e7563f72 100644 --- a/libs/core/pyproject.toml +++ b/libs/core/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "langchain-core" -version = "0.2.32" +version = "0.2.33" description = "Building applications with LLMs through composability" authors = [] license = "MIT" From a06818a6543793e1b21f5896135244949e02c8e1 Mon Sep 17 00:00:00 2001 From: Bagatur <22008038+baskaryan@users.noreply.github.com> Date: Fri, 16 Aug 2024 11:30:17 -0700 Subject: [PATCH 02/80] openai[patch]: update core dep (#25502) --- libs/partners/openai/poetry.lock | 233 +++++++++++++++------------- libs/partners/openai/pyproject.toml | 2 +- 2 files changed, 122 insertions(+), 113 deletions(-) diff --git a/libs/partners/openai/poetry.lock b/libs/partners/openai/poetry.lock index 82a758795111e..cfdc077269cff 100644 --- a/libs/partners/openai/poetry.lock +++ b/libs/partners/openai/poetry.lock @@ -475,7 +475,7 @@ files = [ [[package]] name = "langchain-core" -version = "0.2.29" +version = "0.2.33" description = "Building applications with LLMs through composability" optional = false python-versions = ">=3.8.1,<4.0" @@ -518,13 +518,13 @@ url = "../../standard-tests" [[package]] name = "langsmith" -version = "0.1.98" +version = "0.1.99" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langsmith-0.1.98-py3-none-any.whl", hash = "sha256:f79e8a128652bbcee4606d10acb6236973b5cd7dde76e3741186d3b97b5698e9"}, - {file = "langsmith-0.1.98.tar.gz", hash = "sha256:e07678219a0502e8f26d35294e72127a39d25e32fafd091af5a7bb661e9a6bd1"}, + {file = "langsmith-0.1.99-py3-none-any.whl", hash = "sha256:ef8d1d74a2674c514aa429b0171a9fbb661207dc3835142cca0e8f1bf97b26b0"}, + {file = "langsmith-0.1.99.tar.gz", hash = "sha256:b5c6a1f158abda61600a4a445081ee848b4a28b758d91f2793dc02aeffafcaf1"}, ] [package.dependencies] @@ -677,13 +677,13 @@ files = [ [[package]] name = "openai" -version = "1.40.1" +version = "1.40.8" description = "The official Python library for the openai API" optional = false python-versions = ">=3.7.1" files = [ - {file = "openai-1.40.1-py3-none-any.whl", hash = "sha256:cf5929076c6ca31c26f1ed207e9fd19eb05404cc9104f64c9d29bb0ac0c5bcd4"}, - {file = "openai-1.40.1.tar.gz", hash = "sha256:cb1294ac1f8c6a1acbb07e090698eb5ad74a7a88484e77126612a4f22579673d"}, + {file = "openai-1.40.8-py3-none-any.whl", hash = "sha256:3ed4ddad48e0dde059c9b4d3dc240e47781beca2811e52ba449ddc4a471a2fd4"}, + {file = "openai-1.40.8.tar.gz", hash = "sha256:e225f830b946378e214c5b2cfa8df28ba2aeb7e9d44f738cb2a926fd971f5bc0"}, ] [package.dependencies] @@ -701,62 +701,68 @@ datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] [[package]] name = "orjson" -version = "3.10.6" +version = "3.10.7" description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" optional = false python-versions = ">=3.8" files = [ - {file = "orjson-3.10.6-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = 
"sha256:fb0ee33124db6eaa517d00890fc1a55c3bfe1cf78ba4a8899d71a06f2d6ff5c7"}, - {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c1c4b53b24a4c06547ce43e5fee6ec4e0d8fe2d597f4647fc033fd205707365"}, - {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eadc8fd310edb4bdbd333374f2c8fec6794bbbae99b592f448d8214a5e4050c0"}, - {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:61272a5aec2b2661f4fa2b37c907ce9701e821b2c1285d5c3ab0207ebd358d38"}, - {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57985ee7e91d6214c837936dc1608f40f330a6b88bb13f5a57ce5257807da143"}, - {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:633a3b31d9d7c9f02d49c4ab4d0a86065c4a6f6adc297d63d272e043472acab5"}, - {file = "orjson-3.10.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:1c680b269d33ec444afe2bdc647c9eb73166fa47a16d9a75ee56a374f4a45f43"}, - {file = "orjson-3.10.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f759503a97a6ace19e55461395ab0d618b5a117e8d0fbb20e70cfd68a47327f2"}, - {file = "orjson-3.10.6-cp310-none-win32.whl", hash = "sha256:95a0cce17f969fb5391762e5719575217bd10ac5a189d1979442ee54456393f3"}, - {file = "orjson-3.10.6-cp310-none-win_amd64.whl", hash = "sha256:df25d9271270ba2133cc88ee83c318372bdc0f2cd6f32e7a450809a111efc45c"}, - {file = "orjson-3.10.6-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:b1ec490e10d2a77c345def52599311849fc063ae0e67cf4f84528073152bb2ba"}, - {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55d43d3feb8f19d07e9f01e5b9be4f28801cf7c60d0fa0d279951b18fae1932b"}, - {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac3045267e98fe749408eee1593a142e02357c5c99be0802185ef2170086a863"}, - {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c27bc6a28ae95923350ab382c57113abd38f3928af3c80be6f2ba7eb8d8db0b0"}, - {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d27456491ca79532d11e507cadca37fb8c9324a3976294f68fb1eff2dc6ced5a"}, - {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05ac3d3916023745aa3b3b388e91b9166be1ca02b7c7e41045da6d12985685f0"}, - {file = "orjson-3.10.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1335d4ef59ab85cab66fe73fd7a4e881c298ee7f63ede918b7faa1b27cbe5212"}, - {file = "orjson-3.10.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4bbc6d0af24c1575edc79994c20e1b29e6fb3c6a570371306db0993ecf144dc5"}, - {file = "orjson-3.10.6-cp311-none-win32.whl", hash = "sha256:450e39ab1f7694465060a0550b3f6d328d20297bf2e06aa947b97c21e5241fbd"}, - {file = "orjson-3.10.6-cp311-none-win_amd64.whl", hash = "sha256:227df19441372610b20e05bdb906e1742ec2ad7a66ac8350dcfd29a63014a83b"}, - {file = "orjson-3.10.6-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:ea2977b21f8d5d9b758bb3f344a75e55ca78e3ff85595d248eee813ae23ecdfb"}, - {file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b6f3d167d13a16ed263b52dbfedff52c962bfd3d270b46b7518365bcc2121eed"}, - {file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:f710f346e4c44a4e8bdf23daa974faede58f83334289df80bc9cd12fe82573c7"}, - {file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7275664f84e027dcb1ad5200b8b18373e9c669b2a9ec33d410c40f5ccf4b257e"}, - {file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0943e4c701196b23c240b3d10ed8ecd674f03089198cf503105b474a4f77f21f"}, - {file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:446dee5a491b5bc7d8f825d80d9637e7af43f86a331207b9c9610e2f93fee22a"}, - {file = "orjson-3.10.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:64c81456d2a050d380786413786b057983892db105516639cb5d3ee3c7fd5148"}, - {file = "orjson-3.10.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:960db0e31c4e52fa0fc3ecbaea5b2d3b58f379e32a95ae6b0ebeaa25b93dfd34"}, - {file = "orjson-3.10.6-cp312-none-win32.whl", hash = "sha256:a6ea7afb5b30b2317e0bee03c8d34c8181bc5a36f2afd4d0952f378972c4efd5"}, - {file = "orjson-3.10.6-cp312-none-win_amd64.whl", hash = "sha256:874ce88264b7e655dde4aeaacdc8fd772a7962faadfb41abe63e2a4861abc3dc"}, - {file = "orjson-3.10.6-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:66680eae4c4e7fc193d91cfc1353ad6d01b4801ae9b5314f17e11ba55e934183"}, - {file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:caff75b425db5ef8e8f23af93c80f072f97b4fb3afd4af44482905c9f588da28"}, - {file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3722fddb821b6036fd2a3c814f6bd9b57a89dc6337b9924ecd614ebce3271394"}, - {file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2c116072a8533f2fec435fde4d134610f806bdac20188c7bd2081f3e9e0133f"}, - {file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6eeb13218c8cf34c61912e9df2de2853f1d009de0e46ea09ccdf3d757896af0a"}, - {file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:965a916373382674e323c957d560b953d81d7a8603fbeee26f7b8248638bd48b"}, - {file = "orjson-3.10.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:03c95484d53ed8e479cade8628c9cea00fd9d67f5554764a1110e0d5aa2de96e"}, - {file = "orjson-3.10.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:e060748a04cccf1e0a6f2358dffea9c080b849a4a68c28b1b907f272b5127e9b"}, - {file = "orjson-3.10.6-cp38-none-win32.whl", hash = "sha256:738dbe3ef909c4b019d69afc19caf6b5ed0e2f1c786b5d6215fbb7539246e4c6"}, - {file = "orjson-3.10.6-cp38-none-win_amd64.whl", hash = "sha256:d40f839dddf6a7d77114fe6b8a70218556408c71d4d6e29413bb5f150a692ff7"}, - {file = "orjson-3.10.6-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:697a35a083c4f834807a6232b3e62c8b280f7a44ad0b759fd4dce748951e70db"}, - {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd502f96bf5ea9a61cbc0b2b5900d0dd68aa0da197179042bdd2be67e51a1e4b"}, - {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f215789fb1667cdc874c1b8af6a84dc939fd802bf293a8334fce185c79cd359b"}, - {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2debd8ddce948a8c0938c8c93ade191d2f4ba4649a54302a7da905a81f00b56"}, - {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:5410111d7b6681d4b0d65e0f58a13be588d01b473822483f77f513c7f93bd3b2"}, - {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb1f28a137337fdc18384079fa5726810681055b32b92253fa15ae5656e1dddb"}, - {file = "orjson-3.10.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:bf2fbbce5fe7cd1aa177ea3eab2b8e6a6bc6e8592e4279ed3db2d62e57c0e1b2"}, - {file = "orjson-3.10.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:79b9b9e33bd4c517445a62b90ca0cc279b0f1f3970655c3df9e608bc3f91741a"}, - {file = "orjson-3.10.6-cp39-none-win32.whl", hash = "sha256:30b0a09a2014e621b1adf66a4f705f0809358350a757508ee80209b2d8dae219"}, - {file = "orjson-3.10.6-cp39-none-win_amd64.whl", hash = "sha256:49e3bc615652617d463069f91b867a4458114c5b104e13b7ae6872e5f79d0844"}, - {file = "orjson-3.10.6.tar.gz", hash = "sha256:e54b63d0a7c6c54a5f5f726bc93a2078111ef060fec4ecbf34c5db800ca3b3a7"}, + {file = "orjson-3.10.7-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:74f4544f5a6405b90da8ea724d15ac9c36da4d72a738c64685003337401f5c12"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34a566f22c28222b08875b18b0dfbf8a947e69df21a9ed5c51a6bf91cfb944ac"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bf6ba8ebc8ef5792e2337fb0419f8009729335bb400ece005606336b7fd7bab7"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac7cf6222b29fbda9e3a472b41e6a5538b48f2c8f99261eecd60aafbdb60690c"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de817e2f5fc75a9e7dd350c4b0f54617b280e26d1631811a43e7e968fa71e3e9"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:348bdd16b32556cf8d7257b17cf2bdb7ab7976af4af41ebe79f9796c218f7e91"}, + {file = "orjson-3.10.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:479fd0844ddc3ca77e0fd99644c7fe2de8e8be1efcd57705b5c92e5186e8a250"}, + {file = "orjson-3.10.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fdf5197a21dd660cf19dfd2a3ce79574588f8f5e2dbf21bda9ee2d2b46924d84"}, + {file = "orjson-3.10.7-cp310-none-win32.whl", hash = "sha256:d374d36726746c81a49f3ff8daa2898dccab6596864ebe43d50733275c629175"}, + {file = "orjson-3.10.7-cp310-none-win_amd64.whl", hash = "sha256:cb61938aec8b0ffb6eef484d480188a1777e67b05d58e41b435c74b9d84e0b9c"}, + {file = "orjson-3.10.7-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:7db8539039698ddfb9a524b4dd19508256107568cdad24f3682d5773e60504a2"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:480f455222cb7a1dea35c57a67578848537d2602b46c464472c995297117fa09"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8a9c9b168b3a19e37fe2778c0003359f07822c90fdff8f98d9d2a91b3144d8e0"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8de062de550f63185e4c1c54151bdddfc5625e37daf0aa1e75d2a1293e3b7d9a"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6b0dd04483499d1de9c8f6203f8975caf17a6000b9c0c54630cef02e44ee624e"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b58d3795dafa334fc8fd46f7c5dc013e6ad06fd5b9a4cc98cb1456e7d3558bd6"}, + {file 
= "orjson-3.10.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:33cfb96c24034a878d83d1a9415799a73dc77480e6c40417e5dda0710d559ee6"}, + {file = "orjson-3.10.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e724cebe1fadc2b23c6f7415bad5ee6239e00a69f30ee423f319c6af70e2a5c0"}, + {file = "orjson-3.10.7-cp311-none-win32.whl", hash = "sha256:82763b46053727a7168d29c772ed5c870fdae2f61aa8a25994c7984a19b1021f"}, + {file = "orjson-3.10.7-cp311-none-win_amd64.whl", hash = "sha256:eb8d384a24778abf29afb8e41d68fdd9a156cf6e5390c04cc07bbc24b89e98b5"}, + {file = "orjson-3.10.7-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:44a96f2d4c3af51bfac6bc4ef7b182aa33f2f054fd7f34cc0ee9a320d051d41f"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76ac14cd57df0572453543f8f2575e2d01ae9e790c21f57627803f5e79b0d3c3"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bdbb61dcc365dd9be94e8f7df91975edc9364d6a78c8f7adb69c1cdff318ec93"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b48b3db6bb6e0a08fa8c83b47bc169623f801e5cc4f24442ab2b6617da3b5313"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:23820a1563a1d386414fef15c249040042b8e5d07b40ab3fe3efbfbbcbcb8864"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0c6a008e91d10a2564edbb6ee5069a9e66df3fbe11c9a005cb411f441fd2c09"}, + {file = "orjson-3.10.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d352ee8ac1926d6193f602cbe36b1643bbd1bbcb25e3c1a657a4390f3000c9a5"}, + {file = "orjson-3.10.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d2d9f990623f15c0ae7ac608103c33dfe1486d2ed974ac3f40b693bad1a22a7b"}, + {file = "orjson-3.10.7-cp312-none-win32.whl", hash = "sha256:7c4c17f8157bd520cdb7195f75ddbd31671997cbe10aee559c2d613592e7d7eb"}, + {file = "orjson-3.10.7-cp312-none-win_amd64.whl", hash = "sha256:1d9c0e733e02ada3ed6098a10a8ee0052dd55774de3d9110d29868d24b17faa1"}, + {file = "orjson-3.10.7-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:77d325ed866876c0fa6492598ec01fe30e803272a6e8b10e992288b009cbe149"}, + {file = "orjson-3.10.7-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ea2c232deedcb605e853ae1db2cc94f7390ac776743b699b50b071b02bea6fe"}, + {file = "orjson-3.10.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3dcfbede6737fdbef3ce9c37af3fb6142e8e1ebc10336daa05872bfb1d87839c"}, + {file = "orjson-3.10.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:11748c135f281203f4ee695b7f80bb1358a82a63905f9f0b794769483ea854ad"}, + {file = "orjson-3.10.7-cp313-none-win32.whl", hash = "sha256:a7e19150d215c7a13f39eb787d84db274298d3f83d85463e61d277bbd7f401d2"}, + {file = "orjson-3.10.7-cp313-none-win_amd64.whl", hash = "sha256:eef44224729e9525d5261cc8d28d6b11cafc90e6bd0be2157bde69a52ec83024"}, + {file = "orjson-3.10.7-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:6ea2b2258eff652c82652d5e0f02bd5e0463a6a52abb78e49ac288827aaa1469"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:430ee4d85841e1483d487e7b81401785a5dfd69db5de01314538f31f8fbf7ee1"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:4b6146e439af4c2472c56f8540d799a67a81226e11992008cb47e1267a9b3225"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:084e537806b458911137f76097e53ce7bf5806dda33ddf6aaa66a028f8d43a23"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4829cf2195838e3f93b70fd3b4292156fc5e097aac3739859ac0dcc722b27ac0"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1193b2416cbad1a769f868b1749535d5da47626ac29445803dae7cc64b3f5c98"}, + {file = "orjson-3.10.7-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:4e6c3da13e5a57e4b3dca2de059f243ebec705857522f188f0180ae88badd354"}, + {file = "orjson-3.10.7-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c31008598424dfbe52ce8c5b47e0752dca918a4fdc4a2a32004efd9fab41d866"}, + {file = "orjson-3.10.7-cp38-none-win32.whl", hash = "sha256:7122a99831f9e7fe977dc45784d3b2edc821c172d545e6420c375e5a935f5a1c"}, + {file = "orjson-3.10.7-cp38-none-win_amd64.whl", hash = "sha256:a763bc0e58504cc803739e7df040685816145a6f3c8a589787084b54ebc9f16e"}, + {file = "orjson-3.10.7-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:e76be12658a6fa376fcd331b1ea4e58f5a06fd0220653450f0d415b8fd0fbe20"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed350d6978d28b92939bfeb1a0570c523f6170efc3f0a0ef1f1df287cd4f4960"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:144888c76f8520e39bfa121b31fd637e18d4cc2f115727865fdf9fa325b10412"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09b2d92fd95ad2402188cf51573acde57eb269eddabaa60f69ea0d733e789fe9"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5b24a579123fa884f3a3caadaed7b75eb5715ee2b17ab5c66ac97d29b18fe57f"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591bcfe7512353bd609875ab38050efe3d55e18934e2f18950c108334b4ff"}, + {file = "orjson-3.10.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f4db56635b58cd1a200b0a23744ff44206ee6aa428185e2b6c4a65b3197abdcd"}, + {file = "orjson-3.10.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0fa5886854673222618638c6df7718ea7fe2f3f2384c452c9ccedc70b4a510a5"}, + {file = "orjson-3.10.7-cp39-none-win32.whl", hash = "sha256:8272527d08450ab16eb405f47e0f4ef0e5ff5981c3d82afe0efd25dcbef2bcd2"}, + {file = "orjson-3.10.7-cp39-none-win_amd64.whl", hash = "sha256:974683d4618c0c7dbf4f69c95a979734bf183d0658611760017f6e70a145af58"}, + {file = "orjson-3.10.7.tar.gz", hash = "sha256:75ef0640403f945f3a1f9f6400686560dbfb0fb5b16589ad62cd477043c4eee3"}, ] [[package]] @@ -1282,29 +1288,29 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "ruff" -version = "0.5.6" +version = "0.5.7" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.5.6-py3-none-linux_armv6l.whl", hash = "sha256:a0ef5930799a05522985b9cec8290b185952f3fcd86c1772c3bdbd732667fdcd"}, - {file = "ruff-0.5.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b652dc14f6ef5d1552821e006f747802cc32d98d5509349e168f6bf0ee9f8f42"}, - {file = "ruff-0.5.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:80521b88d26a45e871f31e4b88938fd87db7011bb961d8afd2664982dfc3641a"}, - {file = "ruff-0.5.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9bc8f328a9f1309ae80e4d392836e7dbc77303b38ed4a7112699e63d3b066ab"}, - {file = "ruff-0.5.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4d394940f61f7720ad371ddedf14722ee1d6250fd8d020f5ea5a86e7be217daf"}, - {file = "ruff-0.5.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:111a99cdb02f69ddb2571e2756e017a1496c2c3a2aeefe7b988ddab38b416d36"}, - {file = "ruff-0.5.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:e395daba77a79f6dc0d07311f94cc0560375ca20c06f354c7c99af3bf4560c5d"}, - {file = "ruff-0.5.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c476acb43c3c51e3c614a2e878ee1589655fa02dab19fe2db0423a06d6a5b1b6"}, - {file = "ruff-0.5.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e2ff8003f5252fd68425fd53d27c1f08b201d7ed714bb31a55c9ac1d4c13e2eb"}, - {file = "ruff-0.5.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c94e084ba3eaa80c2172918c2ca2eb2230c3f15925f4ed8b6297260c6ef179ad"}, - {file = "ruff-0.5.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:1f77c1c3aa0669fb230b06fb24ffa3e879391a3ba3f15e3d633a752da5a3e670"}, - {file = "ruff-0.5.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:f908148c93c02873210a52cad75a6eda856b2cbb72250370ce3afef6fb99b1ed"}, - {file = "ruff-0.5.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:563a7ae61ad284187d3071d9041c08019975693ff655438d8d4be26e492760bd"}, - {file = "ruff-0.5.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:94fe60869bfbf0521e04fd62b74cbca21cbc5beb67cbb75ab33fe8c174f54414"}, - {file = "ruff-0.5.6-py3-none-win32.whl", hash = "sha256:e6a584c1de6f8591c2570e171cc7ce482bb983d49c70ddf014393cd39e9dfaed"}, - {file = "ruff-0.5.6-py3-none-win_amd64.whl", hash = "sha256:d7fe7dccb1a89dc66785d7aa0ac283b2269712d8ed19c63af908fdccca5ccc1a"}, - {file = "ruff-0.5.6-py3-none-win_arm64.whl", hash = "sha256:57c6c0dd997b31b536bff49b9eee5ed3194d60605a4427f735eeb1f9c1b8d264"}, - {file = "ruff-0.5.6.tar.gz", hash = "sha256:07c9e3c2a8e1fe377dd460371c3462671a728c981c3205a5217291422209f642"}, + {file = "ruff-0.5.7-py3-none-linux_armv6l.whl", hash = "sha256:548992d342fc404ee2e15a242cdbea4f8e39a52f2e7752d0e4cbe88d2d2f416a"}, + {file = "ruff-0.5.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:00cc8872331055ee017c4f1071a8a31ca0809ccc0657da1d154a1d2abac5c0be"}, + {file = "ruff-0.5.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:eaf3d86a1fdac1aec8a3417a63587d93f906c678bb9ed0b796da7b59c1114a1e"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a01c34400097b06cf8a6e61b35d6d456d5bd1ae6961542de18ec81eaf33b4cb8"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcc8054f1a717e2213500edaddcf1dbb0abad40d98e1bd9d0ad364f75c763eea"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:7f70284e73f36558ef51602254451e50dd6cc479f8b6f8413a95fcb5db4a55fc"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:a78ad870ae3c460394fc95437d43deb5c04b5c29297815a2a1de028903f19692"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ccd078c66a8e419475174bfe60a69adb36ce04f8d4e91b006f1329d5cd44bcf"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e31c9bad4ebf8fdb77b59cae75814440731060a09a0e0077d559a556453acbb"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d796327eed8e168164346b769dd9a27a70e0298d667b4ecee6877ce8095ec8e"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:4a09ea2c3f7778cc635e7f6edf57d566a8ee8f485f3c4454db7771efb692c499"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a36d8dcf55b3a3bc353270d544fb170d75d2dff41eba5df57b4e0b67a95bb64e"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9369c218f789eefbd1b8d82a8cf25017b523ac47d96b2f531eba73770971c9e5"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b88ca3db7eb377eb24fb7c82840546fb7acef75af4a74bd36e9ceb37a890257e"}, + {file = "ruff-0.5.7-py3-none-win32.whl", hash = "sha256:33d61fc0e902198a3e55719f4be6b375b28f860b09c281e4bdbf783c0566576a"}, + {file = "ruff-0.5.7-py3-none-win_amd64.whl", hash = "sha256:083bbcbe6fadb93cd86709037acc510f86eed5a314203079df174c40bbbca6b3"}, + {file = "ruff-0.5.7-py3-none-win_arm64.whl", hash = "sha256:2dca26154ff9571995107221d0aeaad0e75a77b5a682d6236cf89a58c70b76f4"}, + {file = "ruff-0.5.7.tar.gz", hash = "sha256:8dfc0a458797f5d9fb622dd0efc52d796f23f0a1493a9527f4e49a550ae9a7e5"}, ] [[package]] @@ -1482,43 +1488,46 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "watchdog" -version = "4.0.1" +version = "4.0.2" description = "Filesystem events monitoring" optional = false python-versions = ">=3.8" files = [ - {file = "watchdog-4.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:da2dfdaa8006eb6a71051795856bedd97e5b03e57da96f98e375682c48850645"}, - {file = "watchdog-4.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e93f451f2dfa433d97765ca2634628b789b49ba8b504fdde5837cdcf25fdb53b"}, - {file = "watchdog-4.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ef0107bbb6a55f5be727cfc2ef945d5676b97bffb8425650dadbb184be9f9a2b"}, - {file = "watchdog-4.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:17e32f147d8bf9657e0922c0940bcde863b894cd871dbb694beb6704cfbd2fb5"}, - {file = "watchdog-4.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:03e70d2df2258fb6cb0e95bbdbe06c16e608af94a3ffbd2b90c3f1e83eb10767"}, - {file = "watchdog-4.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:123587af84260c991dc5f62a6e7ef3d1c57dfddc99faacee508c71d287248459"}, - {file = "watchdog-4.0.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:093b23e6906a8b97051191a4a0c73a77ecc958121d42346274c6af6520dec175"}, - {file = "watchdog-4.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:611be3904f9843f0529c35a3ff3fd617449463cb4b73b1633950b3d97fa4bfb7"}, - {file = "watchdog-4.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:62c613ad689ddcb11707f030e722fa929f322ef7e4f18f5335d2b73c61a85c28"}, - {file = "watchdog-4.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d4925e4bf7b9bddd1c3de13c9b8a2cdb89a468f640e66fbfabaf735bd85b3e35"}, - {file = 
"watchdog-4.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cad0bbd66cd59fc474b4a4376bc5ac3fc698723510cbb64091c2a793b18654db"}, - {file = "watchdog-4.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a3c2c317a8fb53e5b3d25790553796105501a235343f5d2bf23bb8649c2c8709"}, - {file = "watchdog-4.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c9904904b6564d4ee8a1ed820db76185a3c96e05560c776c79a6ce5ab71888ba"}, - {file = "watchdog-4.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:667f3c579e813fcbad1b784db7a1aaa96524bed53437e119f6a2f5de4db04235"}, - {file = "watchdog-4.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d10a681c9a1d5a77e75c48a3b8e1a9f2ae2928eda463e8d33660437705659682"}, - {file = "watchdog-4.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0144c0ea9997b92615af1d94afc0c217e07ce2c14912c7b1a5731776329fcfc7"}, - {file = "watchdog-4.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:998d2be6976a0ee3a81fb8e2777900c28641fb5bfbd0c84717d89bca0addcdc5"}, - {file = "watchdog-4.0.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e7921319fe4430b11278d924ef66d4daa469fafb1da679a2e48c935fa27af193"}, - {file = "watchdog-4.0.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:f0de0f284248ab40188f23380b03b59126d1479cd59940f2a34f8852db710625"}, - {file = "watchdog-4.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bca36be5707e81b9e6ce3208d92d95540d4ca244c006b61511753583c81c70dd"}, - {file = "watchdog-4.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ab998f567ebdf6b1da7dc1e5accfaa7c6992244629c0fdaef062f43249bd8dee"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_aarch64.whl", hash = "sha256:dddba7ca1c807045323b6af4ff80f5ddc4d654c8bce8317dde1bd96b128ed253"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_armv7l.whl", hash = "sha256:4513ec234c68b14d4161440e07f995f231be21a09329051e67a2118a7a612d2d"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_i686.whl", hash = "sha256:4107ac5ab936a63952dea2a46a734a23230aa2f6f9db1291bf171dac3ebd53c6"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_ppc64.whl", hash = "sha256:6e8c70d2cd745daec2a08734d9f63092b793ad97612470a0ee4cbb8f5f705c57"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f27279d060e2ab24c0aa98363ff906d2386aa6c4dc2f1a374655d4e02a6c5e5e"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_s390x.whl", hash = "sha256:f8affdf3c0f0466e69f5b3917cdd042f89c8c63aebdb9f7c078996f607cdb0f5"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:ac7041b385f04c047fcc2951dc001671dee1b7e0615cde772e84b01fbf68ee84"}, - {file = "watchdog-4.0.1-py3-none-win32.whl", hash = "sha256:206afc3d964f9a233e6ad34618ec60b9837d0582b500b63687e34011e15bb429"}, - {file = "watchdog-4.0.1-py3-none-win_amd64.whl", hash = "sha256:7577b3c43e5909623149f76b099ac49a1a01ca4e167d1785c76eb52fa585745a"}, - {file = "watchdog-4.0.1-py3-none-win_ia64.whl", hash = "sha256:d7b9f5f3299e8dd230880b6c55504a1f69cf1e4316275d1b215ebdd8187ec88d"}, - {file = "watchdog-4.0.1.tar.gz", hash = "sha256:eebaacf674fa25511e8867028d281e602ee6500045b57f43b08778082f7f8b44"}, + {file = "watchdog-4.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ede7f010f2239b97cc79e6cb3c249e72962404ae3865860855d5cbe708b0fd22"}, + {file = "watchdog-4.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a2cffa171445b0efa0726c561eca9a27d00a1f2b83846dbd5a4f639c4f8ca8e1"}, + {file = "watchdog-4.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:c50f148b31b03fbadd6d0b5980e38b558046b127dc483e5e4505fcef250f9503"}, + {file = "watchdog-4.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7c7d4bf585ad501c5f6c980e7be9c4f15604c7cc150e942d82083b31a7548930"}, + {file = "watchdog-4.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:914285126ad0b6eb2258bbbcb7b288d9dfd655ae88fa28945be05a7b475a800b"}, + {file = "watchdog-4.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:984306dc4720da5498b16fc037b36ac443816125a3705dfde4fd90652d8028ef"}, + {file = "watchdog-4.0.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1cdcfd8142f604630deef34722d695fb455d04ab7cfe9963055df1fc69e6727a"}, + {file = "watchdog-4.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d7ab624ff2f663f98cd03c8b7eedc09375a911794dfea6bf2a359fcc266bff29"}, + {file = "watchdog-4.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:132937547a716027bd5714383dfc40dc66c26769f1ce8a72a859d6a48f371f3a"}, + {file = "watchdog-4.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:cd67c7df93eb58f360c43802acc945fa8da70c675b6fa37a241e17ca698ca49b"}, + {file = "watchdog-4.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:bcfd02377be80ef3b6bc4ce481ef3959640458d6feaae0bd43dd90a43da90a7d"}, + {file = "watchdog-4.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:980b71510f59c884d684b3663d46e7a14b457c9611c481e5cef08f4dd022eed7"}, + {file = "watchdog-4.0.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:aa160781cafff2719b663c8a506156e9289d111d80f3387cf3af49cedee1f040"}, + {file = "watchdog-4.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f6ee8dedd255087bc7fe82adf046f0b75479b989185fb0bdf9a98b612170eac7"}, + {file = "watchdog-4.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0b4359067d30d5b864e09c8597b112fe0a0a59321a0f331498b013fb097406b4"}, + {file = "watchdog-4.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:770eef5372f146997638d737c9a3c597a3b41037cfbc5c41538fc27c09c3a3f9"}, + {file = "watchdog-4.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eeea812f38536a0aa859972d50c76e37f4456474b02bd93674d1947cf1e39578"}, + {file = "watchdog-4.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b2c45f6e1e57ebb4687690c05bc3a2c1fb6ab260550c4290b8abb1335e0fd08b"}, + {file = "watchdog-4.0.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:10b6683df70d340ac3279eff0b2766813f00f35a1d37515d2c99959ada8f05fa"}, + {file = "watchdog-4.0.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f7c739888c20f99824f7aa9d31ac8a97353e22d0c0e54703a547a218f6637eb3"}, + {file = "watchdog-4.0.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c100d09ac72a8a08ddbf0629ddfa0b8ee41740f9051429baa8e31bb903ad7508"}, + {file = "watchdog-4.0.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:f5315a8c8dd6dd9425b974515081fc0aadca1d1d61e078d2246509fd756141ee"}, + {file = "watchdog-4.0.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:2d468028a77b42cc685ed694a7a550a8d1771bb05193ba7b24006b8241a571a1"}, + {file = "watchdog-4.0.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f15edcae3830ff20e55d1f4e743e92970c847bcddc8b7509bcd172aa04de506e"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_aarch64.whl", hash = "sha256:936acba76d636f70db8f3c66e76aa6cb5136a936fc2a5088b9ce1c7a3508fc83"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_armv7l.whl", hash = "sha256:e252f8ca942a870f38cf785aef420285431311652d871409a64e2a0a52a2174c"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_i686.whl", hash = 
"sha256:0e83619a2d5d436a7e58a1aea957a3c1ccbf9782c43c0b4fed80580e5e4acd1a"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_ppc64.whl", hash = "sha256:88456d65f207b39f1981bf772e473799fcdc10801062c36fd5ad9f9d1d463a73"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:32be97f3b75693a93c683787a87a0dc8db98bb84701539954eef991fb35f5fbc"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_s390x.whl", hash = "sha256:c82253cfc9be68e3e49282831afad2c1f6593af80c0daf1287f6a92657986757"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_x86_64.whl", hash = "sha256:c0b14488bd336c5b1845cee83d3e631a1f8b4e9c5091ec539406e4a324f882d8"}, + {file = "watchdog-4.0.2-py3-none-win32.whl", hash = "sha256:0d8a7e523ef03757a5aa29f591437d64d0d894635f8a50f370fe37f913ce4e19"}, + {file = "watchdog-4.0.2-py3-none-win_amd64.whl", hash = "sha256:c344453ef3bf875a535b0488e3ad28e341adbd5a9ffb0f7d62cefacc8824ef2b"}, + {file = "watchdog-4.0.2-py3-none-win_ia64.whl", hash = "sha256:baececaa8edff42cd16558a639a9b0ddf425f93d892e8392a56bf904f5eff22c"}, + {file = "watchdog-4.0.2.tar.gz", hash = "sha256:b4dfbb6c49221be4535623ea4474a4d6ee0a9cef4a80b20c28db4d858b64e270"}, ] [package.extras] @@ -1527,4 +1536,4 @@ watchmedo = ["PyYAML (>=3.10)"] [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<4.0" -content-hash = "cab8db174a065e75e0d720ef93633060c81e0c18052f42a6d6b3cdbf36bd7c34" +content-hash = "db9b3d367ecba136594ed6a7d3bba88b4192a05a4c69e01eae274a2a61dd5971" diff --git a/libs/partners/openai/pyproject.toml b/libs/partners/openai/pyproject.toml index e4ec1b1085efd..eb3176808d6a2 100644 --- a/libs/partners/openai/pyproject.toml +++ b/libs/partners/openai/pyproject.toml @@ -23,7 +23,7 @@ ignore_missing_imports = true [tool.poetry.dependencies] python = ">=3.8.1,<4.0" -langchain-core = "^0.2.29" +langchain-core = "^0.2.33" openai = "^1.40.0" tiktoken = ">=0.7,<1" From a2e90a5a43bb2b70bed99fd280c054cc1cb999de Mon Sep 17 00:00:00 2001 From: Isaac Francisco <78627776+isahers1@users.noreply.github.com> Date: Fri, 16 Aug 2024 13:20:37 -0700 Subject: [PATCH 03/80] add embeddings integration tests (#25508) --- .../integration_tests/test_embeddings.py | 25 +++++----- .../integration_tests/__init__.py | 5 ++ .../integration_tests/embeddings.py | 49 +++++++++++++++++++ .../unit_tests/__init__.py | 3 +- .../unit_tests/embeddings.py | 28 +++++++++++ 5 files changed, 95 insertions(+), 15 deletions(-) create mode 100644 libs/standard-tests/langchain_standard_tests/integration_tests/embeddings.py create mode 100644 libs/standard-tests/langchain_standard_tests/unit_tests/embeddings.py diff --git a/libs/partners/ollama/tests/integration_tests/test_embeddings.py b/libs/partners/ollama/tests/integration_tests/test_embeddings.py index e0310bf3b0f21..26722362e6b66 100644 --- a/libs/partners/ollama/tests/integration_tests/test_embeddings.py +++ b/libs/partners/ollama/tests/integration_tests/test_embeddings.py @@ -1,20 +1,17 @@ """Test Ollama embeddings.""" -from langchain_ollama.embeddings import OllamaEmbeddings +from typing import Type + +from langchain_standard_tests.integration_tests import EmbeddingsIntegrationTests +from langchain_ollama.embeddings import OllamaEmbeddings -def test_langchain_ollama_embedding_documents() -> None: - """Test cohere embeddings.""" - documents = ["foo bar"] - embedding = OllamaEmbeddings(model="llama3") - output = embedding.embed_documents(documents) - assert len(output) == 1 - assert len(output[0]) > 0 +class TestOllamaEmbeddings(EmbeddingsIntegrationTests): + @property + def 
embeddings_class(self) -> Type[OllamaEmbeddings]: + return OllamaEmbeddings -def test_langchain_ollama_embedding_query() -> None: - """Test cohere embeddings.""" - document = "foo bar" - embedding = OllamaEmbeddings(model="llama3") - output = embedding.embed_query(document) - assert len(output) > 0 + @property + def embedding_model_params(self) -> dict: + return {"model": "llama3:latest"} diff --git a/libs/standard-tests/langchain_standard_tests/integration_tests/__init__.py b/libs/standard-tests/langchain_standard_tests/integration_tests/__init__.py index f304bff2381b4..ef9b6b05acfb8 100644 --- a/libs/standard-tests/langchain_standard_tests/integration_tests/__init__.py +++ b/libs/standard-tests/langchain_standard_tests/integration_tests/__init__.py @@ -9,6 +9,7 @@ "cache", "chat_models", "vectorstores", + "embeddings", ] for module in modules: @@ -19,7 +20,11 @@ from langchain_standard_tests.integration_tests.chat_models import ( ChatModelIntegrationTests, ) +from langchain_standard_tests.integration_tests.embeddings import ( + EmbeddingsIntegrationTests, +) __all__ = [ "ChatModelIntegrationTests", + "EmbeddingsIntegrationTests", ] diff --git a/libs/standard-tests/langchain_standard_tests/integration_tests/embeddings.py b/libs/standard-tests/langchain_standard_tests/integration_tests/embeddings.py new file mode 100644 index 0000000000000..1de2aece78023 --- /dev/null +++ b/libs/standard-tests/langchain_standard_tests/integration_tests/embeddings.py @@ -0,0 +1,49 @@ +from typing import List + +from langchain_core.embeddings import Embeddings + +from langchain_standard_tests.unit_tests.embeddings import EmbeddingsTests + + +class EmbeddingsIntegrationTests(EmbeddingsTests): + def test_embed_query(self, model: Embeddings) -> None: + embedding_1 = model.embed_query("foo") + + assert isinstance(embedding_1, List) + assert isinstance(embedding_1[0], float) + + embedding_2 = model.embed_query("bar") + + assert len(embedding_1) > 0 + assert len(embedding_1) == len(embedding_2) + + def test_embed_documents(self, model: Embeddings) -> None: + documents = ["foo", "bar", "baz"] + embeddings = model.embed_documents(documents) + + assert len(embeddings) == len(documents) + assert all(isinstance(embedding, List) for embedding in embeddings) + assert all(isinstance(embedding[0], float) for embedding in embeddings) + assert len(embeddings[0]) > 0 + assert all(len(embedding) == len(embeddings[0]) for embedding in embeddings) + + async def test_aembed_query(self, model: Embeddings) -> None: + embedding_1 = await model.aembed_query("foo") + + assert isinstance(embedding_1, List) + assert isinstance(embedding_1[0], float) + + embedding_2 = await model.aembed_query("bar") + + assert len(embedding_1) > 0 + assert len(embedding_1) == len(embedding_2) + + async def test_aembed_documents(self, model: Embeddings) -> None: + documents = ["foo", "bar", "baz"] + embeddings = await model.aembed_documents(documents) + + assert len(embeddings) == len(documents) + assert all(isinstance(embedding, List) for embedding in embeddings) + assert all(isinstance(embedding[0], float) for embedding in embeddings) + assert len(embeddings[0]) > 0 + assert all(len(embedding) == len(embeddings[0]) for embedding in embeddings) diff --git a/libs/standard-tests/langchain_standard_tests/unit_tests/__init__.py b/libs/standard-tests/langchain_standard_tests/unit_tests/__init__.py index 418330b5dc184..14715260ea539 100644 --- a/libs/standard-tests/langchain_standard_tests/unit_tests/__init__.py +++ 
b/libs/standard-tests/langchain_standard_tests/unit_tests/__init__.py @@ -6,6 +6,7 @@ # https://docs.pytest.org/en/7.1.x/how-to/writing_plugins.html#assertion-rewriting modules = [ "chat_models", + "embeddings", ] for module in modules: @@ -13,4 +14,4 @@ from langchain_standard_tests.unit_tests.chat_models import ChatModelUnitTests -__all__ = ["ChatModelUnitTests"] +__all__ = ["ChatModelUnitTests", "EmbeddingsUnitTests"] diff --git a/libs/standard-tests/langchain_standard_tests/unit_tests/embeddings.py b/libs/standard-tests/langchain_standard_tests/unit_tests/embeddings.py new file mode 100644 index 0000000000000..28e8c66bbd6fd --- /dev/null +++ b/libs/standard-tests/langchain_standard_tests/unit_tests/embeddings.py @@ -0,0 +1,28 @@ +from abc import abstractmethod +from typing import Type + +import pytest +from langchain_core.embeddings import Embeddings + +from langchain_standard_tests.base import BaseStandardTests + + +class EmbeddingsTests(BaseStandardTests): + @property + @abstractmethod + def embeddings_class(self) -> Type[Embeddings]: + ... + + @property + def embedding_model_params(self) -> dict: + return {} + + @pytest.fixture + def model(self) -> Embeddings: + return self.embeddings_class(**self.embedding_model_params) + + +class EmbeddingsUnitTests(EmbeddingsTests): + def test_init(self) -> None: + model = self.embeddings_class(**self.embedding_model_params) + assert model is not None From c1bd4e05bcd180a36ba7b5d89ca2b5ce4f1e5d4a Mon Sep 17 00:00:00 2001 From: Chengzu Ou Date: Fri, 16 Aug 2024 13:24:30 -0700 Subject: [PATCH 04/80] docs: fix Databricks Vector Search demo notebook (#25504) **Description:** This PR fixes an issue in the demo notebook of Databricks Vector Search in "Work with Delta Sync Index" section. **Issue:** N/A **Dependencies:** N/A --------- Co-authored-by: Chengzu Ou Co-authored-by: Erick Friis --- .../vectorstores/databricks_vector_search.ipynb | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/docs/docs/integrations/vectorstores/databricks_vector_search.ipynb b/docs/docs/integrations/vectorstores/databricks_vector_search.ipynb index 8509b506910ba..dad7b5d0c7449 100644 --- a/docs/docs/integrations/vectorstores/databricks_vector_search.ipynb +++ b/docs/docs/integrations/vectorstores/databricks_vector_search.ipynb @@ -122,7 +122,7 @@ "outputs": [], "source": [ "vector_search_endpoint_name = \"vector_search_demo_endpoint\"\n", - "index_name = \"ml.llm.demo_index\"\n", + "index_name = \"vector_search_demo.vector_search.state_of_the_union_index\"\n", "\n", "index = vsc.create_direct_access_index(\n", " endpoint_name=vector_search_endpoint_name,\n", @@ -206,7 +206,16 @@ "metadata": {}, "outputs": [], "source": [ - "dvs_delta_sync = DatabricksVectorSearch(\"catalog_name.schema_name.delta_sync_index\")\n", + "delta_sync_index = vsc.create_delta_sync_index(\n", + " endpoint_name=vector_search_endpoint_name,\n", + " source_table_name=\"vector_search_demo.vector_search.state_of_the_union\",\n", + " index_name=\"vector_search_demo.vector_search.state_of_the_union_index\",\n", + " pipeline_type=\"TRIGGERED\",\n", + " primary_key=\"id\",\n", + " embedding_source_column=\"text\",\n", + " embedding_model_endpoint_name=\"e5-small-v2\",\n", + ")\n", + "dvs_delta_sync = DatabricksVectorSearch(delta_sync_index)\n", "dvs_delta_sync.similarity_search(query)" ] } From f6e6a1787815195e849bf9e133980d8cbafa52f2 Mon Sep 17 00:00:00 2001 From: gbaian10 <34255899+gbaian10@users.noreply.github.com> Date: Sun, 18 Aug 2024 14:22:49 +0800 Subject: [PATCH 05/80] 
docs: add prompt to install nltk (#25519) fix: #25473 - **Description:** add prompt to install nltk - **Issue:** #25473 --- docs/docs/how_to/document_loader_markdown.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/docs/how_to/document_loader_markdown.ipynb b/docs/docs/how_to/document_loader_markdown.ipynb index b68d4eadc1496..a5a3e0138847a 100644 --- a/docs/docs/how_to/document_loader_markdown.ipynb +++ b/docs/docs/how_to/document_loader_markdown.ipynb @@ -26,7 +26,7 @@ "metadata": {}, "outputs": [], "source": [ - "%pip install \"unstructured[md]\"" + "%pip install \"unstructured[md]\" nltk" ] }, { From bda3becbe77a22ce49d77c59035727f3b2ed64f1 Mon Sep 17 00:00:00 2001 From: gbaian10 <34255899+gbaian10@users.noreply.github.com> Date: Sun, 18 Aug 2024 14:23:24 +0800 Subject: [PATCH 06/80] docs: add prompt to install beautifulsoup4. (#25518) fix: #25482 - **Description:** Add a prompt to install beautifulsoup4 in places where `from langchain_community.document_loaders import WebBaseLoader` is used. - **Issue:** #25482 --- docs/docs/how_to/qa_chat_history_how_to.ipynb | 2 +- docs/docs/how_to/qa_sources.ipynb | 2 +- docs/docs/how_to/qa_streaming.ipynb | 2 +- docs/docs/integrations/callbacks/infino.ipynb | 3 ++- .../document_transformers/google_cloud_vertexai_rerank.ipynb | 2 +- docs/docs/tutorials/local_rag.ipynb | 5 ++++- docs/docs/tutorials/qa_chat_history.ipynb | 2 +- docs/docs/tutorials/summarization.ipynb | 2 +- .../migrating_chains/conversation_retrieval_chain.ipynb | 2 +- docs/docs/versions/migrating_chains/retrieval_qa.ipynb | 2 +- 10 files changed, 14 insertions(+), 10 deletions(-) diff --git a/docs/docs/how_to/qa_chat_history_how_to.ipynb b/docs/docs/how_to/qa_chat_history_how_to.ipynb index fc82be236277f..7d76f804d6e8c 100644 --- a/docs/docs/how_to/qa_chat_history_how_to.ipynb +++ b/docs/docs/how_to/qa_chat_history_how_to.ipynb @@ -42,7 +42,7 @@ "outputs": [], "source": [ "%%capture --no-stderr\n", - "%pip install --upgrade --quiet langchain langchain-community langchain-chroma bs4" + "%pip install --upgrade --quiet langchain langchain-community langchain-chroma beautifulsoup4" ] }, { diff --git a/docs/docs/how_to/qa_sources.ipynb b/docs/docs/how_to/qa_sources.ipynb index 98d220a677602..32f27a7d7b39c 100644 --- a/docs/docs/how_to/qa_sources.ipynb +++ b/docs/docs/how_to/qa_sources.ipynb @@ -40,7 +40,7 @@ "metadata": {}, "outputs": [], "source": [ - "%pip install --upgrade --quiet langchain langchain-community langchainhub langchain-openai langchain-chroma bs4" + "%pip install --upgrade --quiet langchain langchain-community langchainhub langchain-openai langchain-chroma beautifulsoup4" ] }, { diff --git a/docs/docs/how_to/qa_streaming.ipynb b/docs/docs/how_to/qa_streaming.ipynb index 003204058dc09..67b80b1d6fe4b 100644 --- a/docs/docs/how_to/qa_streaming.ipynb +++ b/docs/docs/how_to/qa_streaming.ipynb @@ -33,7 +33,7 @@ "metadata": {}, "outputs": [], "source": [ - "%pip install --upgrade --quiet langchain langchain-community langchainhub langchain-openai langchain-chroma bs4" + "%pip install --upgrade --quiet langchain langchain-community langchainhub langchain-openai langchain-chroma beautifulsoup4" ] }, { diff --git a/docs/docs/integrations/callbacks/infino.ipynb b/docs/docs/integrations/callbacks/infino.ipynb index ff254ff687a66..e0f9304086136 100644 --- a/docs/docs/integrations/callbacks/infino.ipynb +++ b/docs/docs/integrations/callbacks/infino.ipynb @@ -37,7 +37,8 @@ "%pip install --upgrade --quiet infinopy\n", "%pip install --upgrade --quiet 
matplotlib\n", "%pip install --upgrade --quiet tiktoken\n", - "%pip install --upgrade --quiet langchain langchain-openai langchain-community" + "%pip install --upgrade --quiet langchain langchain-openai langchain-community\n", + "%pip install --upgrade --quiet beautifulsoup4" ] }, { diff --git a/docs/docs/integrations/document_transformers/google_cloud_vertexai_rerank.ipynb b/docs/docs/integrations/document_transformers/google_cloud_vertexai_rerank.ipynb index c265d0d8bb527..e5b5adaa21ceb 100644 --- a/docs/docs/integrations/document_transformers/google_cloud_vertexai_rerank.ipynb +++ b/docs/docs/integrations/document_transformers/google_cloud_vertexai_rerank.ipynb @@ -23,7 +23,7 @@ }, "outputs": [], "source": [ - "%pip install --upgrade --quiet langchain langchain-community langchain-google-community langchain-google-community[vertexaisearch] langchain-google-vertexai langchain-chroma langchain-text-splitters" + "%pip install --upgrade --quiet langchain langchain-community langchain-google-community langchain-google-community[vertexaisearch] langchain-google-vertexai langchain-chroma langchain-text-splitters beautifulsoup4" ] }, { diff --git a/docs/docs/tutorials/local_rag.ipynb b/docs/docs/tutorials/local_rag.ipynb index 5d122de3c4b99..bdff9dfdfcffe 100644 --- a/docs/docs/tutorials/local_rag.ipynb +++ b/docs/docs/tutorials/local_rag.ipynb @@ -57,7 +57,10 @@ "%pip install -qU langchain_chroma\n", "\n", "# Local inference and embeddings via Ollama\n", - "%pip install -qU langchain_ollama" + "%pip install -qU langchain_ollama\n", + "\n", + "# Web Loader\n", + "% pip install -qU beautifulsoup4" ] }, { diff --git a/docs/docs/tutorials/qa_chat_history.ipynb b/docs/docs/tutorials/qa_chat_history.ipynb index 0497f8e943d3b..51a34e88b84a1 100644 --- a/docs/docs/tutorials/qa_chat_history.ipynb +++ b/docs/docs/tutorials/qa_chat_history.ipynb @@ -65,7 +65,7 @@ "outputs": [], "source": [ "%%capture --no-stderr\n", - "%pip install --upgrade --quiet langchain langchain-community langchainhub langchain-chroma bs4" + "%pip install --upgrade --quiet langchain langchain-community langchainhub langchain-chroma beautifulsoup4" ] }, { diff --git a/docs/docs/tutorials/summarization.ipynb b/docs/docs/tutorials/summarization.ipynb index 410bd17f5ab2d..0daae27ff4be0 100644 --- a/docs/docs/tutorials/summarization.ipynb +++ b/docs/docs/tutorials/summarization.ipynb @@ -157,7 +157,7 @@ "metadata": {}, "outputs": [], "source": [ - "%pip install --upgrade --quiet langchain-openai tiktoken chromadb langchain\n", + "%pip install --upgrade --quiet langchain-openai tiktoken chromadb langchain beautifulsoup4\n", "\n", "# Set env var OPENAI_API_KEY or load from a .env file\n", "# import dotenv\n", diff --git a/docs/docs/versions/migrating_chains/conversation_retrieval_chain.ipynb b/docs/docs/versions/migrating_chains/conversation_retrieval_chain.ipynb index ae25068f6c396..841eca7c44c85 100644 --- a/docs/docs/versions/migrating_chains/conversation_retrieval_chain.ipynb +++ b/docs/docs/versions/migrating_chains/conversation_retrieval_chain.ipynb @@ -34,7 +34,7 @@ "metadata": {}, "outputs": [], "source": [ - "%pip install --upgrade --quiet langchain-community langchain langchain-openai faiss-cpu" + "%pip install --upgrade --quiet langchain-community langchain langchain-openai faiss-cpu beautifulsoup4" ] }, { diff --git a/docs/docs/versions/migrating_chains/retrieval_qa.ipynb b/docs/docs/versions/migrating_chains/retrieval_qa.ipynb index 4dc777a603aa0..2f1c45daa9027 100644 --- 
a/docs/docs/versions/migrating_chains/retrieval_qa.ipynb +++ b/docs/docs/versions/migrating_chains/retrieval_qa.ipynb @@ -33,7 +33,7 @@ "metadata": {}, "outputs": [], "source": [ - "%pip install --upgrade --quiet langchain-community langchain langchain-openai faiss-cpu" + "%pip install --upgrade --quiet langchain-community langchain langchain-openai faiss-cpu beautifulsoup4" ] }, { From 937b3904ebe84e6e26da22ef79c08a0c50e83b3a Mon Sep 17 00:00:00 2001 From: Hassan El Mghari Date: Sun, 18 Aug 2024 10:48:30 -0700 Subject: [PATCH 07/80] together[patch]: update base url (#25524) Updated the Together base URL from `.ai` to `.xyz` since some customers have reported problems with `.ai`. --- libs/partners/together/langchain_together/chat_models.py | 4 ++-- libs/partners/together/langchain_together/embeddings.py | 6 +++--- libs/partners/together/langchain_together/llms.py | 4 ++-- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/libs/partners/together/langchain_together/chat_models.py b/libs/partners/together/langchain_together/chat_models.py index cb749fbaaee4b..76d79d8d29d75 100644 --- a/libs/partners/together/langchain_together/chat_models.py +++ b/libs/partners/together/langchain_together/chat_models.py @@ -315,12 +315,12 @@ def _get_ls_params( default_factory=secret_from_env("TOGETHER_API_KEY", default=None), ) """Together AI API key. - + Automatically read from env variable `TOGETHER_API_KEY` if not provided. """ together_api_base: str = Field( default_factory=from_env( - "TOGETHER_API_BASE", default="https://api.together.ai/v1/" + "TOGETHER_API_BASE", default="https://api.together.xyz/v1/" ), alias="base_url", ) diff --git a/libs/partners/together/langchain_together/embeddings.py b/libs/partners/together/langchain_together/embeddings.py index 3757480d13977..80961b3b4af07 100644 --- a/libs/partners/together/langchain_together/embeddings.py +++ b/libs/partners/together/langchain_together/embeddings.py @@ -106,7 +106,7 @@ class TogetherEmbeddings(BaseModel, Embeddings): client: Any = Field(default=None, exclude=True) #: :meta private: async_client: Any = Field(default=None, exclude=True) #: :meta private: model: str = "togethercomputer/m2-bert-80M-8k-retrieval" - """Embeddings model name to use. + """Embeddings model name to use. Instead, use 'togethercomputer/m2-bert-80M-8k-retrieval' for example. """ dimensions: Optional[int] = None @@ -119,12 +119,12 @@ class TogetherEmbeddings(BaseModel, Embeddings): default_factory=secret_from_env("TOGETHER_API_KEY", default=None), ) """Together AI API key. - + Automatically read from env variable `TOGETHER_API_KEY` if not provided. """ together_api_base: str = Field( default_factory=from_env( - "TOGETHER_API_BASE", default="https://api.together.ai/v1/" + "TOGETHER_API_BASE", default="https://api.together.xyz/v1/" ), alias="base_url", ) diff --git a/libs/partners/together/langchain_together/llms.py b/libs/partners/together/langchain_together/llms.py index 4d78149d57341..279fbcaaaf2da 100644 --- a/libs/partners/together/langchain_together/llms.py +++ b/libs/partners/together/langchain_together/llms.py @@ -36,14 +36,14 @@ class Together(LLM): model = Together(model_name="mistralai/Mixtral-8x7B-Instruct-v0.1") """ - base_url: str = "https://api.together.ai/v1/completions" + base_url: str = "https://api.together.xyz/v1/completions" """Base completions API URL.""" together_api_key: SecretStr = Field( alias="api_key", default_factory=secret_from_env("TOGETHER_API_KEY"), ) """Together AI API key. 
- + Automatically read from env variable `TOGETHER_API_KEY` if not provided. """ model: str From 49dea06af15a110518f7a119260f609a4b5f74fe Mon Sep 17 00:00:00 2001 From: Bagatur <22008038+baskaryan@users.noreply.github.com> Date: Sun, 18 Aug 2024 12:15:52 -0700 Subject: [PATCH 08/80] docs: fix Agent deprecation msg (#25464) --- docs/api_reference/create_api_rst.py | 93 ++++++++++--------- libs/langchain/langchain/agents/agent.py | 4 +- .../langchain/langchain/agents/agent_types.py | 2 +- 3 files changed, 50 insertions(+), 49 deletions(-) diff --git a/docs/api_reference/create_api_rst.py b/docs/api_reference/create_api_rst.py index 2fec7b300e0f5..5d7c558cea412 100644 --- a/docs/api_reference/create_api_rst.py +++ b/docs/api_reference/create_api_rst.py @@ -318,7 +318,7 @@ def _construct_doc( index_autosummary += f""" :ref:`{package_namespace}_{module}` -{'^' * (len(module) + 5)} +{'^' * (len(package_namespace) + len(module) + 8)} """ if classes: @@ -448,7 +448,6 @@ def _construct_doc( """ docs.append((f"{module}.rst", module_doc)) docs.append(("index.rst", index_doc + index_autosummary)) - return docs @@ -530,47 +529,7 @@ def _build_index(dirs: List[str]) -> None: ordered = ["core", "langchain", "text-splitters", "community", "experimental"] main_ = [dir_ for dir_ in ordered if dir_ in dirs] integrations = sorted(dir_ for dir_ in dirs if dir_ not in main_) - main_headers = [ - " ".join(custom_names.get(x, x.title()) for x in dir_.split("-")) - for dir_ in main_ - ] - integration_headers = [ - " ".join( - custom_names.get(x, x.title().replace("ai", "AI").replace("db", "DB")) - for x in dir_.split("-") - ) - for dir_ in integrations - ] - main_tree = "\n".join( - f"{header_name}<{dir_.replace('-', '_')}/index>" - for header_name, dir_ in zip(main_headers, main_) - ) - main_grid = "\n".join( - f'- header: "**{header_name}**"\n content: "{_package_namespace(dir_).replace("_", "-")}: {_get_package_version(_package_dir(dir_))}"\n link: {dir_.replace("-", "_")}/index.html' - for header_name, dir_ in zip(main_headers, main_) - ) - integration_tree = "\n".join( - f"{header_name}<{dir_.replace('-', '_')}/index>" - for header_name, dir_ in zip(integration_headers, integrations) - ) - - integration_grid = "" - integrations_to_show = [ - "openai", - "anthropic", - "google-vertexai", - "aws", - "huggingface", - "mistralai", - ] - for header_name, dir_ in sorted( - zip(integration_headers, integrations), - key=lambda h_d: integrations_to_show.index(h_d[1]) - if h_d[1] in integrations_to_show - else len(integrations_to_show), - )[: len(integrations_to_show)]: - integration_grid += f'\n- header: "**{header_name}**"\n content: {_package_namespace(dir_).replace("_", "-")} {_get_package_version(_package_dir(dir_))}\n link: {dir_.replace("-", "_")}/index.html' - doc = f"""# LangChain Python API Reference + doc = """# LangChain Python API Reference Welcome to the LangChain Python API reference. This is a reference for all `langchain-x` packages. @@ -578,8 +537,22 @@ def _build_index(dirs: List[str]) -> None: For user guides see [https://python.langchain.com](https://python.langchain.com). For the legacy API reference hosted on ReadTheDocs see [https://api.python.langchain.com/](https://api.python.langchain.com/). 
+""" -## Base packages + if main_: + main_headers = [ + " ".join(custom_names.get(x, x.title()) for x in dir_.split("-")) + for dir_ in main_ + ] + main_tree = "\n".join( + f"{header_name}<{dir_.replace('-', '_')}/index>" + for header_name, dir_ in zip(main_headers, main_) + ) + main_grid = "\n".join( + f'- header: "**{header_name}**"\n content: "{_package_namespace(dir_).replace("_", "-")}: {_get_package_version(_package_dir(dir_))}"\n link: {dir_.replace("-", "_")}/index.html' + for header_name, dir_ in zip(main_headers, main_) + ) + doc += f"""## Base packages ```{{gallery-grid}} :grid-columns: "1 2 2 3" @@ -594,8 +567,37 @@ def _build_index(dirs: List[str]) -> None: {main_tree} ``` +""" + if integrations: + integration_headers = [ + " ".join( + custom_names.get(x, x.title().replace("ai", "AI").replace("db", "DB")) + for x in dir_.split("-") + ) + for dir_ in integrations + ] + integration_tree = "\n".join( + f"{header_name}<{dir_.replace('-', '_')}/index>" + for header_name, dir_ in zip(integration_headers, integrations) + ) -## Integrations + integration_grid = "" + integrations_to_show = [ + "openai", + "anthropic", + "google-vertexai", + "aws", + "huggingface", + "mistralai", + ] + for header_name, dir_ in sorted( + zip(integration_headers, integrations), + key=lambda h_d: integrations_to_show.index(h_d[1]) + if h_d[1] in integrations_to_show + else len(integrations_to_show), + )[: len(integrations_to_show)]: + integration_grid += f'\n- header: "**{header_name}**"\n content: {_package_namespace(dir_).replace("_", "-")} {_get_package_version(_package_dir(dir_))}\n link: {dir_.replace("-", "_")}/index.html' + doc += f"""## Integrations ```{{gallery-grid}} :grid-columns: "1 2 2 3" @@ -612,7 +614,6 @@ def _build_index(dirs: List[str]) -> None: {integration_tree} ``` - """ with open(HERE / "reference.md", "w") as f: f.write(doc) diff --git a/libs/langchain/langchain/agents/agent.py b/libs/langchain/langchain/agents/agent.py index 68170badd4770..16f648bea9ed9 100644 --- a/libs/langchain/langchain/agents/agent.py +++ b/libs/langchain/langchain/agents/agent.py @@ -629,7 +629,7 @@ async def aplan( @deprecated( "0.1.0", - alternative=( + message=( "Use new agent constructor methods like create_react_agent, create_json_agent, " "create_structured_chat_agent, etc." ), @@ -720,7 +720,7 @@ def tool_run_logging_kwargs(self) -> Dict: @deprecated( "0.1.0", - alternative=( + message=( "Use new agent constructor methods like create_react_agent, create_json_agent, " "create_structured_chat_agent, etc." ), diff --git a/libs/langchain/langchain/agents/agent_types.py b/libs/langchain/langchain/agents/agent_types.py index 9fbd6f95c4ed5..14844a2a38f97 100644 --- a/libs/langchain/langchain/agents/agent_types.py +++ b/libs/langchain/langchain/agents/agent_types.py @@ -7,7 +7,7 @@ @deprecated( "0.1.0", - alternative=( + message=( "Use new agent constructor methods like create_react_agent, create_json_agent, " "create_structured_chat_agent, etc." 
), From 4255a30f2025f0a72f3fb2634b6e9a25f665a870 Mon Sep 17 00:00:00 2001 From: ZhangShenao <15201440436@163.com> Date: Mon, 19 Aug 2024 21:00:21 +0800 Subject: [PATCH 09/80] Improvement[Community] Improve api doc for `SingleFileFacebookMessengerChatLoader` (#25536) Delete redundant args in api doc --- .../langchain_community/chat_loaders/facebook_messenger.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/libs/community/langchain_community/chat_loaders/facebook_messenger.py b/libs/community/langchain_community/chat_loaders/facebook_messenger.py index bd7d1ba15cfd0..2bf883b0f0fe9 100644 --- a/libs/community/langchain_community/chat_loaders/facebook_messenger.py +++ b/libs/community/langchain_community/chat_loaders/facebook_messenger.py @@ -16,9 +16,6 @@ class SingleFileFacebookMessengerChatLoader(BaseChatLoader): Args: path (Union[Path, str]): The path to the chat file. - Attributes: - path (Path): The path to the chat file. - """ def __init__(self, path: Union[Path, str]) -> None: @@ -58,9 +55,6 @@ class FolderFacebookMessengerChatLoader(BaseChatLoader): path (Union[str, Path]): The path to the directory containing the chat files. - Attributes: - path (Path): The path to the directory containing the chat files. - """ def __init__(self, path: Union[str, Path]) -> None: From 32f514752327ec745b4186a92cad5d81ad87e155 Mon Sep 17 00:00:00 2001 From: maang-h <55082429+maang-h@users.noreply.github.com> Date: Mon, 19 Aug 2024 21:23:09 +0800 Subject: [PATCH 10/80] docs: Fix QianfanLLMEndpoint and Tongyi input text (#25529) - **Description:** Fix `QianfanLLMEndpoint` and `Tongyi` input text. --- .../llms/baidu_qianfan_endpoint.py | 26 ++++++++----------- .../langchain_community/llms/tongyi.py | 24 +++++++---------- 2 files changed, 20 insertions(+), 30 deletions(-) diff --git a/libs/community/langchain_community/llms/baidu_qianfan_endpoint.py b/libs/community/langchain_community/llms/baidu_qianfan_endpoint.py index c6e26fbb191ea..601f952bddbb4 100644 --- a/libs/community/langchain_community/llms/baidu_qianfan_endpoint.py +++ b/libs/community/langchain_community/llms/baidu_qianfan_endpoint.py @@ -69,30 +69,26 @@ class QianfanLLMEndpoint(LLM): Invoke: .. code-block:: python - messages = [ - ("system", "你是一名专业的翻译家,可以将用户的中文翻译为英文。"), - ("human", "我喜欢编程。"), - ] - llm.invoke(messages) + input_text = "用50个字左右阐述,生命的意义在于" + llm.invoke(input_text) .. code-block:: python - 'I like programming.' + '生命的意义在于体验、成长、爱与被爱、贡献与传承,以及对未知的勇敢探索与自我超越。' Stream: .. code-block:: python - for chunk in llm.stream(messages): + for chunk in llm.stream(input_text): print(chunk) .. code-block:: python - I like - programming. + 生命的意义 | 在于不断探索 | 与成长 | ,实现 | 自我价值,| 给予爱 | 并接受 | 爱, | 在经历 | 中感悟 | ,让 | 短暂的存在 | 绽放出无限 | 的光彩 | 与温暖 | 。 .. code-block:: python - stream = llm.stream(messages) + stream = llm.stream(input_text) full = next(stream) for chunk in stream: full += chunk @@ -100,23 +96,23 @@ class QianfanLLMEndpoint(LLM): .. code-block:: - 'I like programming.' + '生命的意义在于探索、成长、爱与被爱、贡献价值、体验世界之美,以及在有限的时间里追求内心的平和与幸福。' Async: .. code-block:: python - await llm.ainvoke(messages) + await llm.ainvoke(input_text) # stream: - # async for chunk in llm.astream(messages): + # async for chunk in llm.astream(input_text): # print(chunk) # batch: - # await llm.abatch([messages]) + # await llm.abatch([input_text]) .. code-block:: python - 'I like programming.' 
+ '生命的意义在于探索、成长、爱与被爱、贡献社会,在有限的时间里追寻无限的可能,实现自我价值,让生活充满色彩与意义。' """ # noqa: E501 diff --git a/libs/community/langchain_community/llms/tongyi.py b/libs/community/langchain_community/llms/tongyi.py index c501e5424cb51..b4d79a52c1c2c 100644 --- a/libs/community/langchain_community/llms/tongyi.py +++ b/libs/community/langchain_community/llms/tongyi.py @@ -199,44 +199,38 @@ class Tongyi(BaseLLM): Invoke: .. code-block:: python - messages = [ - ("system", "你是一名专业的翻译家,可以将用户的中文翻译为英文。"), - ("human", "我喜欢编程。"), - ] - llm.invoke(messages) + input_text = "用50个字左右阐述,生命的意义在于" + llm.invoke(input_text) .. code-block:: python - 'I enjoy programming.' + '探索、成长、连接与爱——在有限的时间里,不断学习、体验、贡献并寻找与世界和谐共存之道,让每一刻充满价值与意义。' Stream: .. code-block:: python - for chunk in llm.stream(messages): + for chunk in llm.stream(input_text): print(chunk) .. code-block:: python - I - enjoy - programming - . + 探索 | 、 | 成长 | 、连接与爱。 | 在有限的时间里,寻找个人价值, | 贡献于他人,共同体验世界的美好 | ,让世界因自己的存在而更 | 温暖。 Async: .. code-block:: python - await llm.ainvoke(messages) + await llm.ainvoke(input_text) # stream: - # async for chunk in llm.astream(messages): + # async for chunk in llm.astream(input_text): # print(chunk) # batch: - # await llm.abatch([messages]) + # await llm.abatch([input_text]) .. code-block:: python - 'I enjoy programming.' + '探索、成长、连接与爱。在有限的时间里,寻找个人价值,贡献于他人和社会,体验丰富多彩的情感与经历,不断学习进步,让世界因自己的存在而更美好。' """ # noqa: E501 From 09c0823c3aaf29a46d74e2622dd4da0fadc3edf9 Mon Sep 17 00:00:00 2001 From: ccurme Date: Mon, 19 Aug 2024 09:29:25 -0400 Subject: [PATCH 11/80] docs: update summarization guides (#25408) --- docs/docs/how_to/index.mdx | 9 + docs/docs/how_to/summarize_map_reduce.ipynb | 449 ++++++++++++++ docs/docs/how_to/summarize_refine.ipynb | 333 ++++++++++ docs/docs/how_to/summarize_stuff.ipynb | 209 +++++++ docs/docs/tutorials/summarization.ipynb | 634 +++++++++++--------- 5 files changed, 1344 insertions(+), 290 deletions(-) create mode 100644 docs/docs/how_to/summarize_map_reduce.ipynb create mode 100644 docs/docs/how_to/summarize_refine.ipynb create mode 100644 docs/docs/how_to/summarize_stuff.ipynb diff --git a/docs/docs/how_to/index.mdx b/docs/docs/how_to/index.mdx index de9000095fb28..05f90a7d24dbd 100644 --- a/docs/docs/how_to/index.mdx +++ b/docs/docs/how_to/index.mdx @@ -315,6 +315,15 @@ For a high-level tutorial, check out [this guide](/docs/tutorials/graph/). - [How to: improve results with prompting](/docs/how_to/graph_prompting) - [How to: construct knowledge graphs](/docs/how_to/graph_constructing) +### Summarization + +LLMs can summarize and otherwise distill desired information from text, including +large volumes of text. For a high-level tutorial, check out [this guide](/docs/tutorials/summarization). 
+ +- [How to: summarize text in a single LLM call](/docs/how_to/summarize_stuff) +- [How to: summarize text through parallelization](/docs/how_to/summarize_map_reduce) +- [How to: summarize text through iterative refinement](/docs/how_to/summarize_refine) + ## [LangGraph](https://langchain-ai.github.io/langgraph) LangGraph is an extension of LangChain aimed at diff --git a/docs/docs/how_to/summarize_map_reduce.ipynb b/docs/docs/how_to/summarize_map_reduce.ipynb new file mode 100644 index 0000000000000..71ffa176e09ce --- /dev/null +++ b/docs/docs/how_to/summarize_map_reduce.ipynb @@ -0,0 +1,449 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "c47f5b2f-e14c-43e7-a0ab-d71562636624", + "metadata": {}, + "source": [ + "---\n", + "sidebar_position: 3\n", + "keywords: [summarize, summarization, map reduce]\n", + "---" + ] + }, + { + "cell_type": "markdown", + "id": "682a4f53-27db-43ef-a909-dd9ded76051b", + "metadata": {}, + "source": [ + "# How to summarize text through parallelization\n", + "\n", + "LLMs can summarize and otherwise distill desired information from text, including large volumes of text. In many cases, especially when the amount of text is large compared to the size of the model's context window, it can be helpful (or necessary) to break up the summarization task into smaller components.\n", + "\n", + "Map-reduce represents one class of strategies for accomplishing this. The idea is to break the text into \"sub-documents\", and first map each sub-document to an individual summary using an LLM. Then, we reduce or consolidate those summaries into a single global summary.\n", + "\n", + "Note that the map step is typically parallelized over the input documents. This strategy is especially effective when understanding of a sub-document does not rely on preceeding context. For example, when summarizing a corpus of many, shorter documents.\n", + "\n", + "[LangGraph](https://langchain-ai.github.io/langgraph/), built on top of `langchain-core`, suports [map-reduce](https://langchain-ai.github.io/langgraph/how-tos/map-reduce/) workflows and is well-suited to this problem:\n", + "\n", + "- LangGraph allows for individual steps (such as successive summarizations) to be streamed, allowing for greater control of execution;\n", + "- LangGraph's [checkpointing](https://langchain-ai.github.io/langgraph/how-tos/persistence/) supports error recovery, extending with human-in-the-loop workflows, and easier incorporation into conversational applications.\n", + "- The LangGraph implementation is straightforward to modify and extend.\n", + "\n", + "Below, we demonstrate how to summarize text via a map-reduce strategy." + ] + }, + { + "cell_type": "markdown", + "id": "4aa52e84-d1b5-4b33-b4c4-541156686ef3", + "metadata": {}, + "source": [ + "## Load chat model\n", + "\n", + "Let's first load a chat model:\n", + "```{=mdx}\n", + "import ChatModelTabs from \"@theme/ChatModelTabs\";\n", + "\n", + "\n", + "```" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "e5f426fc-cea6-4351-8931-1e422d3c8b69", + "metadata": {}, + "outputs": [], + "source": [ + "# | output: false\n", + "# | echo: false\n", + "\n", + "from langchain_openai import ChatOpenAI\n", + "\n", + "llm = ChatOpenAI(model=\"gpt-4o-mini\", temperature=0)" + ] + }, + { + "cell_type": "markdown", + "id": "b137fe82-0a53-4910-b53e-b87a297f329d", + "metadata": {}, + "source": [ + "## Load documents\n", + "\n", + "First we load in our documents. 
We will use [WebBaseLoader](https://api.python.langchain.com/en/latest/document_loaders/langchain_community.document_loaders.web_base.WebBaseLoader.html) to load a blog post, and split the documents into smaller sub-documents." + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "27c8fed0-b2d7-4549-a086-f5ee657efc41", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Created a chunk of size 1003, which is longer than the specified 1000\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Generated 14 documents.\n" + ] + } + ], + "source": [ + "from langchain_community.document_loaders import WebBaseLoader\n", + "from langchain_text_splitters import CharacterTextSplitter\n", + "\n", + "text_splitter = CharacterTextSplitter.from_tiktoken_encoder(\n", + " chunk_size=1000, chunk_overlap=0\n", + ")\n", + "\n", + "loader = WebBaseLoader(\"https://lilianweng.github.io/posts/2023-06-23-agent/\")\n", + "docs = loader.load()\n", + "\n", + "split_docs = text_splitter.split_documents(docs)\n", + "print(f\"Generated {len(split_docs)} documents.\")" + ] + }, + { + "cell_type": "markdown", + "id": "84216044-6f1e-4b90-b4fa-29ec305abf51", + "metadata": {}, + "source": [ + "## Create graph\n", + "\n", + "### Map step\n", + "Let's first define the prompt associated with the map step, and associated it with the LLM via a [chain](/docs/how_to/sequence/):" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "669afa40-2708-4fa1-841e-c74a67bd9175", + "metadata": {}, + "outputs": [], + "source": [ + "from langchain_core.output_parsers import StrOutputParser\n", + "from langchain_core.prompts import ChatPromptTemplate\n", + "\n", + "map_prompt = ChatPromptTemplate.from_messages(\n", + " [(\"human\", \"Write a concise summary of the following:\\\\n\\\\n{context}\")]\n", + ")\n", + "\n", + "map_chain = map_prompt | llm | StrOutputParser()" + ] + }, + { + "cell_type": "markdown", + "id": "81597ed0-8df5-4cbc-a242-3140a168a7f4", + "metadata": {}, + "source": [ + "### Reduce step\n", + "\n", + "We also define a chain that takes the document mapping results and reduces them into a single output." + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "de59caae-8fb2-4cf4-aea0-be78a081a695", + "metadata": {}, + "outputs": [], + "source": [ + "reduce_template = \"\"\"\n", + "The following is a set of summaries:\n", + "{docs}\n", + "Take these and distill it into a final, consolidated summary\n", + "of the main themes.\n", + "\"\"\"\n", + "\n", + "reduce_prompt = ChatPromptTemplate([(\"human\", reduce_template)])\n", + "\n", + "reduce_chain = reduce_prompt | llm | StrOutputParser()" + ] + }, + { + "cell_type": "markdown", + "id": "cb264a71-12f5-44ef-ad2e-d38c4bf71bbd", + "metadata": {}, + "source": [ + "### Orchestration via LangGraph\n", + "\n", + "Below we implement a simple application that maps the summarization step on a list of documents, then reduces them using the above prompts.\n", + "\n", + "Map-reduce flows are particularly useful when texts are long compared to the context window of a LLM. For long texts, we need a mechanism that ensures that the context to be summarized in the reduce step does not exceed a model's context window size. Here we implement a recursive \"collapsing\" of the summaries: the inputs are partitioned based on a token limit, and summaries are generated of the partitions. 
This step is repeated until the total length of the summaries is within a desired limit, allowing for the summarization of arbitrary-length text.\n",
+    "\n",
+    "We will need to install `langgraph`:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "6dc8cf11-c0e5-4448-a921-9377acad1df0",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "pip install -qU langgraph"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 6,
+   "id": "dafedc2e-feeb-44bc-9f38-e55394953de5",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import operator\n",
+    "from typing import Annotated, List, Literal, TypedDict\n",
+    "\n",
+    "from langchain.chains.combine_documents.reduce import (\n",
+    "    acollapse_docs,\n",
+    "    split_list_of_docs,\n",
+    ")\n",
+    "from langchain_core.documents import Document\n",
+    "from langgraph.constants import Send\n",
+    "from langgraph.graph import END, START, StateGraph\n",
+    "\n",
+    "token_max = 1000\n",
+    "\n",
+    "\n",
+    "def length_function(documents: List[Document]) -> int:\n",
+    "    \"\"\"Get number of tokens for input contents.\"\"\"\n",
+    "    return sum(llm.get_num_tokens(doc.page_content) for doc in documents)\n",
+    "\n",
+    "\n",
+    "# This will be the overall state of the main graph.\n",
+    "# It will contain the input document contents, corresponding\n",
+    "# summaries, and a final summary.\n",
+    "class OverallState(TypedDict):\n",
+    "    # Notice here we use the operator.add\n",
+    "    # This is because we want to combine all the summaries we generate\n",
+    "    # from individual nodes back into one list - this is essentially\n",
+    "    # the \"reduce\" part\n",
+    "    contents: List[str]\n",
+    "    summaries: Annotated[list, operator.add]\n",
+    "    collapsed_summaries: List[Document]\n",
+    "    final_summary: str\n",
+    "\n",
+    "\n",
+    "# This will be the state of the node that we will \"map\" all\n",
+    "# documents to in order to generate summaries\n",
+    "class SummaryState(TypedDict):\n",
+    "    content: str\n",
+    "\n",
+    "\n",
+    "# Here we generate a summary, given a document\n",
+    "async def generate_summary(state: SummaryState):\n",
+    "    response = await map_chain.ainvoke(state[\"content\"])\n",
+    "    return {\"summaries\": [response]}\n",
+    "\n",
+    "\n",
+    "# Here we define the logic to map out over the documents\n",
+    "# We will use this as an edge in the graph\n",
+    "def map_summaries(state: OverallState):\n",
+    "    # We will return a list of `Send` objects\n",
+    "    # Each `Send` object consists of the name of a node in the graph\n",
+    "    # as well as the state to send to that node\n",
+    "    return [\n",
+    "        Send(\"generate_summary\", {\"content\": content}) for content in state[\"contents\"]\n",
+    "    ]\n",
+    "\n",
+    "\n",
+    "def collect_summaries(state: OverallState):\n",
+    "    return {\n",
+    "        \"collapsed_summaries\": [Document(summary) for summary in state[\"summaries\"]]\n",
+    "    }\n",
+    "\n",
+    "\n",
+    "# Add node to collapse summaries\n",
+    "async def collapse_summaries(state: OverallState):\n",
+    "    doc_lists = split_list_of_docs(\n",
+    "        state[\"collapsed_summaries\"], length_function, token_max\n",
+    "    )\n",
+    "    results = []\n",
+    "    for doc_list in doc_lists:\n",
+    "        results.append(await acollapse_docs(doc_list, reduce_chain.ainvoke))\n",
+    "\n",
+    "    return {\"collapsed_summaries\": results}\n",
+    "\n",
+    "\n",
+    "# This represents a conditional edge in the graph that determines\n",
+    "# if we should collapse the summaries or not\n",
+    "def should_collapse(\n",
+    "    state: OverallState,\n",
+    ") -> Literal[\"collapse_summaries\", \"generate_final_summary\"]:\n",
+    "    num_tokens = 
length_function(state[\"collapsed_summaries\"])\n", + " if num_tokens > token_max:\n", + " return \"collapse_summaries\"\n", + " else:\n", + " return \"generate_final_summary\"\n", + "\n", + "\n", + "# Here we will generate the final summary\n", + "async def generate_final_summary(state: OverallState):\n", + " response = await reduce_chain.ainvoke(state[\"collapsed_summaries\"])\n", + " return {\"final_summary\": response}\n", + "\n", + "\n", + "# Construct the graph\n", + "# Nodes:\n", + "graph = StateGraph(OverallState)\n", + "graph.add_node(\"generate_summary\", generate_summary) # same as before\n", + "graph.add_node(\"collect_summaries\", collect_summaries)\n", + "graph.add_node(\"collapse_summaries\", collapse_summaries)\n", + "graph.add_node(\"generate_final_summary\", generate_final_summary)\n", + "\n", + "# Edges:\n", + "graph.add_conditional_edges(START, map_summaries, [\"generate_summary\"])\n", + "graph.add_edge(\"generate_summary\", \"collect_summaries\")\n", + "graph.add_conditional_edges(\"collect_summaries\", should_collapse)\n", + "graph.add_conditional_edges(\"collapse_summaries\", should_collapse)\n", + "graph.add_edge(\"generate_final_summary\", END)\n", + "\n", + "app = graph.compile()" + ] + }, + { + "cell_type": "markdown", + "id": "c2de9413-fa18-4807-9c1f-85a62a8eb7ab", + "metadata": {}, + "source": [ + "LangGraph allows the graph structure to be plotted to help visualize its function:" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "4f26c1e3-3d3c-44f7-bb5f-46db9dc40f4b", + "metadata": {}, + "outputs": [ + { + "data": { + "image/jpeg": "/9j/4AAQSkZJRgABAQAAAQABAAD/4gHYSUNDX1BST0ZJTEUAAQEAAAHIAAAAAAQwAABtbnRyUkdCIFhZWiAH4AABAAEAAAAAAABhY3NwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAA9tYAAQAAAADTLQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAlkZXNjAAAA8AAAACRyWFlaAAABFAAAABRnWFlaAAABKAAAABRiWFlaAAABPAAAABR3dHB0AAABUAAAABRyVFJDAAABZAAAAChnVFJDAAABZAAAAChiVFJDAAABZAAAAChjcHJ0AAABjAAAADxtbHVjAAAAAAAAAAEAAAAMZW5VUwAAAAgAAAAcAHMAUgBHAEJYWVogAAAAAAAAb6IAADj1AAADkFhZWiAAAAAAAABimQAAt4UAABjaWFlaIAAAAAAAACSgAAAPhAAAts9YWVogAAAAAAAA9tYAAQAAAADTLXBhcmEAAAAAAAQAAAACZmYAAPKnAAANWQAAE9AAAApbAAAAAAAAAABtbHVjAAAAAAAAAAEAAAAMZW5VUwAAACAAAAAcAEcAbwBvAGcAbABlACAASQBuAGMALgAgADIAMAAxADb/2wBDAAMCAgMCAgMDAwMEAwMEBQgFBQQEBQoHBwYIDAoMDAsKCwsNDhIQDQ4RDgsLEBYQERMUFRUVDA8XGBYUGBIUFRT/2wBDAQMEBAUEBQkFBQkUDQsNFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBT/wAARCAHXARsDASIAAhEBAxEB/8QAHQABAAMAAwEBAQAAAAAAAAAAAAUGBwMECAECCf/EAFcQAAEEAQIDAggHCgoJAwMFAAEAAgMEBQYRBxIhEzEIFBYiQVFWlBUXVZOV0dMyQlJTVGGBs9LUCSM3OHF1dpKhtDM0NmJygpGxsiQ1dCZEw3ODheHw/8QAGgEBAQEBAQEBAAAAAAAAAAAAAAECBAMFB//EADMRAQABAgIHBQcFAQEAAAAAAAABAhEDkRIUIVFSYdEEEzFToSNBcbHB0uEVM4Gi8EIy/9oADAMBAAIRAxEAPwD+qaIiAiIgIiICIuhmsxFhaYmfHJYle9sUNaAAyTSOPRjQSB6ySSA0AuJABIsRNU2gd9R02o8TXeWS5SlE8fevsMB/xKifI92dHballGQc4f8At0TnClEN/ueXp2p9Bc/v6kNYDyqRj0jgoW8seFxzG777Nqxgb/8ARe+jhU7Kpmfh/vo1sffKrCfLFD3pn1p5VYT5Yoe9M+tffJbC/JFD3Zn1J5LYX5Ioe7M+pPY8/Q2PnlVhPlih70z608qsJ8sUPemfWvvkthfkih7sz6k8lsL8kUPdmfUnsefobHzyqwnyxQ96Z9aeVWE+WKHvTPrX3yWwvyRQ92Z9SeS2F+SKHuzPqT2PP0Nj9RakxE7w2LKUpHH71lhhP/dSSiZNJYKZhZJhce9h6lrqsZH/AGUb5Eswn8dpmb4Hkb18RBJpS/7pi7o/+KPlI6b8wHKWjhVbImY+Ph/v4TYtCKOwmZZma0jjDJVswvMVirLtzwvHoO3QggggjoQQR3qRXjVTNM2lBERZBERAREQEREBERAREQEREBERAREQEREBERAVYrbZfX9x79nQ4etHFC0+iabd0jvVvyNiAPeOZ46bnezqsYUeJ651JXfuDajrXozt0cOQxOAPrBiG//EPWujC8K599vrEfK6x71nRdTK5ajgsbZyGSuV8fQrMMs9q1K2KKJg73Oe4gNA9ZKpQ8IThYe7iXo8//AM9V+0XOi/Pe2NjnuIa1o3JPoCxat4SsWqOHGpNVaa0hqSanRxU+Sxt29Sjjq5FrNwHRntgeXccxa/kcWgk
DdW6vx84ZXJ469biLpOzZlcI4oYs5Vc+RxOwa0CTqSdgAse0Dwo1jNntXVa+lH8M9H5nT9unYwUmYjv035OZ2zbFWOMnsWBpfzbBnNu3zNxugv+h+N+Vy3BrB6tyehdTz5K3BVacfj6kEstt8kDXmeFrZy1sBJOxkcwj0gdN/tnwn9K0eHdrV9rH5ytXpZiPBX8ZJSHj9K2+RjOSSIO67dox3mF27XDl5j0Wb3NHcSc9wd0FpvKaEtNraYnpVczga+crM+H6sVZ8RMcjZABGJBFIYpSzmA2Pd1isHwK1fQ0tqLFVtEVdP1bmvsPqSljqV6u+GGkx9Xtm/dNAfGIHFzQNiXbML+9Bf9aeEVqTA624e42pw41IaudkvizRmip+OyCGEuYIv/Vhjeuz3c5Hmjp16LemO5mNcWlpI35T3hZLxr01qZ+tOHOsdNYPymk03cueNYmO3FWmlisVnRc7HylrN2O5SQSNweinDx84d0ia+W11pbD5SL+Lt461naglqzDo+J47T7prt2n84KDQEVBf4QPC6JwD+JOkGEgO2dnao6Ebg/wCk9IIKuWIzFDUGMr5HF3q2Sx9lnaQW6crZYpW/hNe0kOH5wUEJktsRrrEWWbNZlo5KE46+fJGx00TvV0a2cfn5h6lZ1WNRt8c1bpOqwEugnnyD9huAxkD4ep9HnWG/07H86s66MX/zRPL6z9Fn3CIi50EREBERAREQEREBERAREQEREBERAREQEREBQuoMTPZmp5LHiP4VolwiEri1ksT9u0icR3B3K0g9dnMYdiAQZpFqmqaJvB4IzEZylqGCQRbtmj82xTsN5ZoHfgyM9Hcdj3EdQSCCu18G1PyWD5sfUulmtLYvPyRy3K29mNpbHbgkdDPGCdyGysIe0b7HYHboFHO0PICez1LnYm778otMd/i5hP8AivbRwqtsVW+PX8LsT4x1RpBFaEEdQRGF2FVvIif2pz3z8X2SeRE/tTnvn4vsk7vD4/SVtG9aUVF1LojOeTmV+AtU5b4b8Ul8R8bnj7HxjkPZ8+0W/Lzcu+3o3XX0ZojUnklh/KfVOT8ovFI/hHxCePxfxjlHadnvFvy82+2/oTu8Pj9JLRvaEuu7H1XuLnVoXOJ3JMY3Kr3kRP7U575+L7JPIif2pz3z8X2Sd3h8fpJaN6wfBtT8lg+bH1Lq5fOUNOVojYkbG6Q8letEN5Z3fgRsHVx/MO7vOwBKihoiQjaTUudkbvvsbLG/4tYD/ipDC6TxeBmknq13OtyDlfbsyvnnePUZHku2/Nvt+ZNHCp2zVf4R9Z6SbHHgMVYbbtZfJMYzJW2tj7JjuZteFpJZGD6T5xLiO8n1AKcRF5V1TXN5SdoiIsIIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiIK7xGrY65w91RBl70mMxMuLtMuXofu68JicJJG9D1a3cjoe7uXR4P08Pj+FWkaun8nNmsHDi67KORsb9pZhEYDJHbgdXDY9w7+5SOv7MNPQmpLFjEnPQRY2zJJimt5jdaInEwAbHfnHm7bH7ruK6fCm5XyPDPS1qpgHaWrTY2vJFhHs5DQaYwRCW7Dbk+522Hd3ILWiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiIInVseWm0rmY8BLFBnXUpm4+WcbxssFh7Iu3B80P5Seh6ehdfQUOfr6JwUWq54LOpmUom5KaqAIn2OUdoWbADYu326D+hcfEatjrnD3VEGXvSYzEy4u0y5eh+7rwmJwkkb0PVrdyOh7u5dHg/Tw+P4VaRq6fyc2awcOLrso5Gxv2lmERgMkduB1cNj3Dv7kFwREQEREBERAREQEREBERAREQEREBERAREQEREBERARFD6h1CMKK8MMBuZC04tgrB3ICBtzPc7Y8rGgjc7HvAAJIB1TTNc6NPiJhFSTnNXk7ihhAPUbUx2/T2fVfPhzWH5Dg/epvs11arXvjOFsu6KkfDmsPyHB+9TfZp8Oaw/IcH71N9mmq174zgsu6KkfDmsPyHB+9TfZp8Oaw/IcH71N9mmq174zgsu6KkfDmsPyHB+9TfZp8Oaw/IcH71N9mmq174zgs8Hfwo3Ax2N1Di+KOMrk1skGY7Llo35Z2N2hkP8AxRt5N+4dk30uUZ/BdcFJM/r7JcSrsbmUMCx9LHu6gSW5Yy2Qg+kMieQQfxzT6F7Y4r6RzfF/h5nNH5rH4TxDKVzEZG2ZS6F4IdHI3ePbmY8NcN+m7eq6vBjQeb4I8NsLo7DU8LLVx8ZD7MliUPsSuJdJI7aPvc4np12Gw7gmq174zgs21FSPhzWH5Dg/epvs0+HNYfkOD96m+zTVa98ZwWXdFSPhzWH5Dg/epvs0+HNYfkOD96m+zTVa98ZwWXdFSPhzWH5Dg/epvs0+HNYfkOD96m+zTVa98ZwWXdFSPhzWH5Dg/epvs0+HNYD/AOxwZ/N41MP/AMaarXvjOCy7ooTTuo3Zh1irareI5OsGmauH9owtdvyvY/YczTykb7AggggembXNXRVROjV4oIiLAIiICIiAiIgIiICIiAiIgKlaiO/EbCj0DFXdvzfx1X/+v+iuqpOov5R8N/VNz9dWXZ2X9z+J+UtQk0WTceNcZbTF7RGHx+bh0nV1BlH07mo54Y5BTayCSVrGiUGMPlcwMaXggdehOyyOnxw13W0XXoVcna1Rm83rK9g8dnaVKoTLRrxFxmrROdFC5x7JwHO8t5jIRzANavSaoibMvWqLyzkeIPGDTuCfWyBv4sWdQ4bH4zN5/H0PGZWWZzFYjlhrSvjIZ5hDm8hPMR023Xc1fxl1hwfHErEXMm7WF7FV8RPhrlqpBDKH3p31yyVsXZxuDHsDh9zvvyl3pTSgemkXmevrLi9prF6ss5KDOy4mvprIXWZXUFDF15aV6KIvh7NtWaRsjHedu17NwWt85wJUrNqTVen+DGGz+e4iXxn9Sx49tOHGYKrYeyxIwvNerDyDne8Hq6Vzmt7Mu2aNwGkPQMkjIY3SSOaxjRu5zjsAPWSv0vGevNZ6u1t4N/FzD6kv5CrlNNZKtXNi1SqwWrNeQV5WMnjiMkTXDtfuoyNw1vd5wOkcTdZ630jn9JcPcJlc7nsxdp28nezlOhjn5B0McjGsYyOUw1h1lALuUkBo80klwaQ9CIs74I5XW2T03kGa5x1mnerX3xU7FyOvFPbq8rHMkljgkkjY/mL2kNdseQHYb7Lq+EBmNY4PSWOs6Q8cjAyUTctaxlJl27Wo8r+0kggeC2Rwd2e42ceUuIaSrfZcaciw7RHEy/nOIPD3HUdWs1Vp/K6dyV6e+ylHB43NDYrsY9zQ0GNzA97HMHKN992gjYVKrxP11qPUmGwdXU3wYclrvUGDfbbQglfFTqxzPiYwOZtzNEYAc4Hr1dz9xmlA9PIvJ8/EHiXp7RmuNT2dc/CTdFanGH8RkxNaNmTriWvzOnc1u7ZOWxsDFyAcg3B3O3e4i8QuIVOrxqzuK1h8G1dD3I3UMb8GV5Y52eJ15nxzPc3mLSXu25
S1wLju4jlDWkPUSLA4ddak0DrrKYbVOtY72Jm0dY1GMraxsMXwZLDKyN/KyIN549pQ4MdzO8zbmO6qejOMGu6eos5isnkM1kKNrSN3P4vIZ/DVKE7JYXMAdHHC47xkSg8szQ8Fo33BKaUD1Qi82VNW8QtM8E9I8TsxrGXMRSR4rJ5rGMx1aOBlCVgFgsLY+fna2Zkrjzbbwu5Q1ruVaZwv1bldcav19fdcEmlqOSZh8TXbGwAyQMHjU3OBzODpXlg3JA7HoBud7FVxc8OduJUw9eIbv+f+OO3/AHP/AFV3VHw/8pc39UD9cVeF5dq/9x8IakREXGyIiICIiAiIgIiICIiAiIgKk6i/lHw39U3P11ZXZVbVuLtNyePzdOu646pFLWnqx7do6KQscXM373NdG3zdxuC7bchoPV2aYjE27p+UrDK/CQ0Tktc6RxlTG4nKZowZBtiWrisjUqyFoY8AltuN8MoBIIa8DY7OBBaFAaH4NZ3WXDiTC8Rpb9OSllW3dOzQ264ymKYxjRG4zVo2xdoHGXblaRyuAO/o1t2sYGHZ2KzoO3UDDWjt+kR7L55Z1/krPfQlv7NdncVzN9GV0ZVb4j6NrBVMbldTakzrq+aqZ1tzJ3I5JjNXex8bOkYY2MmMbtY1u+5O4J3XZ1LwR0zrDJ6rt5mKxfj1Ljq2MvVHyARCOB8j43R7AOa8OlJ5uY7FrSNtutg8s6/yVnvoS39mnlnX+Ss99CW/s1e4r4TRncq2K4JQUtP5/EZDWGq9RVsxjpMW92YyDJXV4Xtc0mMCNrefZx89wc47Dcld3UnB7Eak0bp7T7r2Sx50++vNjMpRmYy3WlhjMbJA4sLCSxzmkFhaQ49FOeWdf5Kz30Jb+zTyzr/JWe+hLf2adxXwyaM7lKpeDnpqDCa0xVy/mczW1fHGMq7I3BJI+VjC3tmODQWvI5Og80dmzla0Ag/cl4PuPy+Nwrbmq9UTZ7DTSS0NTeOxNyUDZGhskXOIgx0bg0btcw77b96unlnX+Ss99CW/s08s6/yVnvoS39mncV8MmjO5ANxmrNBYehi9NVG6zY0ySWMhqjUEkFkvc/m721pA4dT0AaGgAAbd3Xuaf1dxFx5qahdNoF1Wdlird0jnzYmldyva5kglqMbybOB5SHAnY9C0FT+Q4hY3FULN27TzNSnWidNPYnw9pkcUbQS5znGPYAAEknuAX4xPEnE57GVcjja2XyGPtxNmr2q2IsyRTRuG7XNcIyHAjqCE7jE4ZTRlUofBw09jcVpqvh8tnMFfwJteL5elaYbcosv57ImMkb2P7R+zju3oQOXlXNpXwd9OaRt4KzUyGYsS4fM3s5A65ZbK6Se3E+OUSOLOZzQJHEdebfYlzuu9y8s6/wAlZ76Et/Zp5Z1/krPfQlv7NO4r4V0Z3KvkuBGAymlNY6fluZJtLVOWOYuyMljEkcxMJ5YyWbBn8Qzo4OPV3Xu2/eZ4HYHOYfiFjZ7eRZBrd4kyLo5Iw6I9hHB/E7sIb5sbT5wd1J9HRWXyzr/JWe+hLf2aeWdf5Kz30Jb+zTuK+E0Z3IHVXBbTutMzPkMt41ZFjT9jTUtXtA2J9WaSN73dG8wkBibs4OAHXpvsRA0fBvxMGUjydzVOqMzkWYuzhjZyN2KQupzMDTEWiINHKWh4cAHFwHMXDor55Z1/krPfQlv7NPLOv8lZ76Et/Zp3FfCmjO5WdWaVt6a4LN0dprBv1U2PFswUVW7bjg5oOx7HtJpCACA0Au5W7nc7NUnwc4dw8J+GGnNJxPbM7GVGxzzM32lnO7pZBv186Rz3dfWpPyzr/JWe+hLf2aeWVc92KzxP9S2h/wDjTuMTx0ZXRnc7WH/lLm/qgfrirwqppXG2rOYtZ23WkoiWuyrXrTbdqGBznOe8DflLiRs3fcBoJ2JLRa1x9pmJrtHuiEkREXIgiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiIKNx1/kR4hf2dyP8AlpFEeC7/ADcOGX9naP6lql+Ov8iPEL+zuR/y0iiPBd/m4cMv7O0f1LUGoIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiCjcdf5EeIX9ncj/AJaRRHgu/wA3Dhl/Z2j+papfjr/IjxC/s7kf8tIojwXf5uHDL+ztH9S1BqCIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAi432Io3cr5WMPqc4BfnxyD8fH/AHwraRzIuHxyD8fH/fCeOQfj4/74S0jmRcPjkH4+P++E8cg/Hx/3wlpHMi4fHIPx8f8AfCeOQfj4/wC+EtI8Y+Gp4ZGR4RZPUvDa3oA3KmbwskdPOfC3Zh8c8Lo3P7LsD1Y/nHLz9eUHcc3SM8BzwxrvEG5ozhJV0E+OviMOIbeeblecRxV4eUSmHsRsHydmzbn6GQdTt1vn8IXwVg4t8Fpc3jgyXUWlee/AGEF0tcgeMR/3Wh49O8ew+6UT/BwcE4eGnCN+rcoyOLPaq5Z2CQgPhpN/0Le/pz7mQ7d4czfq1LSPYKLh8cg/Hx/3wnjkH4+P++EtI5kXD45B+Pj/AL4TxyD8fH/fCWkcyLh8cg/Hx/3wnjkH4+P++EtI5kXD45B+Pj/vhfplmKRwa2VjnH0BwJS0jkREUBERAREQEREBERAREQEREBERAVa15kbFHF1IK0zq0l+5FUM0Z2exrty4tOx2dytIB9BO6sqqHEb/AEWnv63h/wDCRdPZoicWmJWPFFt4faYA87T2Mld6XzVGSPcfWXOBJP5yd19+L7S3s3iPcIv2VLZLJVMNjrN+/Zip0qsbpp7E7wyOJjRu5znHoAACSSqPp/j9oLU1HJ3aOeDaWNq+O2bVypPVibB+Na+VjQ9n+80kFd/f4nHOZed6xfF9pb2bxHuEX7KfF9pb2bxHuEX7KplnwgNN53Q+ssppLJR38tgcRPkxSv1J6ziGxPfG8xytje6NxZtzN6H0FceL4rZa7q7hZipK9IV9U6ftZW65rH88csUdVzWxHm2Dd537hwcejeo67zWMTjnMvO9d/i+0t7N4j3CL9lPi+0t7N4j3CL9lV6Dj3oKzqluno9QxOyT7Rosd2EwrPsAkGFtgs7J0m4I5A8ncbbb9FyXuOmh8frA6XkzfaZptiOpJDWqTzRwzPIDI5JWMMcbySPNc4HqE7/E45zLzvTvxfaW9m8R7hF+ynxfaW9m8R7hF+yqbwy49YviPrLVmnIqV6nbw2SlpQvfRsiOeOOOJzpHSuibHG7mkcBGXcxDQ4bhwK02zZhpVpbFiVkEETDJJLI4NaxoG5JJ7gB6VYx8SfCucy870J8X2lvZvEe4Rfsp8X2lvZvEe4RfsqrY3wieH2W09lc5Vzr34nGNhfZtOx9pjQyZ/ZxPYHRgyNc7oHMDh0J32Vh1HxK03pLI2qOWyPilqriLGdmj7CR/LSgLRNLu1pB5S9vmjzjv0BTv8TjnMvO92Pi+0t7N4j3CL9lPi+0t7N4j3CL9lUxvhO8Nn2hWj1BLLZki7evDFi7b33I/w6
4ERNhvp3i5gACe4KTt8etB0sLgcs7Pslo55spxjq1aad9sx7c7GMYwuLwTtybc24I23B2nf4nHOZed6wfF9pb2bxHuEX7KfF9pb2bxHuEX7KqGI8JXhvnbVCClqQSuu2RSje6lZZGywXFogle6MNhlJGwjkLXHcbDqFI5TjtobCaybpW/nPFc0bEdTklqTiETSAGOMz8nZB7g5uzS/c7j1p3+JxzmXnenvi+0t7N4j3CL9lPi+0t7N4j3CL9lQ1/jXo7HaysaTkyc0uoq8sMU1Ctj7M74jK1ro3OMcbg1hD27vJ5RvsSCoLSnHjBWtD5PVebzlBmIjzNjHVJKtK3DKQ1/LHA6CVgldY23DmsYRuDsNgU7/E45zLzvXb4vtLezeI9wi/ZTyA0wAeXTuKYT98ynG0jruNiBuOoBVbHhA6AOlp9QHULG46C2yhIx1acWW2XdWQmsWdtzuHUN5NyOoGyt2ltUYzWeCq5nD2HWcfZ5uzkfC+JxLXFjgWPAc0hzSCCAeivf4k/wDc5l53pHQd6eerlKE877Rxl11Rk0ri+RzOzjlaHuPVxAlDeY7k8oJJJJNnVO4e/wCv6w/rgf5OqriuDtMRGLNuXrEE+IiIuZBERAREQEREBERAREQEREBVDiN/otPf1vD/AOEit6qPEVhNbBP+8jy0BcfVuHNH+LgP0rq7N+9SseKgeEfozK8QOCmp8HhIW2snYiikiqveGCz2c0crodz0HaNY5nXp53XoqHxP1DkOOHCjM4XB6I1RQvVPE8i6jm8YaUdrsLUUr6jXPOz3ObG4Dl3YenndV6GRe8xdHmfUuPznG7WefzOH0xmsHQg0Pk8GJM9SdRlu27RaY4Wsk2JYzkJL/ud3dCe9cmFhzc2W4EZx2mc/Tr1MLe09kGvouFjG2JGVo2SSx97Yuau89p9ztyu7iF6URTRHkDg9wyoUsLpzQms9KcRJc3jLbWTyMyF9+Bc6KUyRWmu7YQchLWP5QOYOP3PTdX3g9nMlwkdk9FZnRupbeSn1DctR5nHY11indis2XSNsyTg8rC1jwHteQ4CPoD0C9BIkU2GLcLZ7+juLPETAZLA5hrc9nnZihloqL3498LqcLSHTjzWPDoXN5XbEkt233Wt52GCxhMhFaqOyFZ9eRstRjeZ07C0gsA6blw3G3518z2AxmqcTYxeYoVspjbAAmqW4hJFIAQ4czT0OxAP6FVsRwM4dYDJ1sjjdDafoX6zxLBZrY2JkkTx3Oa4N3B/OFbTGwecJdPax1Bwx13o/TOG1VNomriac2Go6ro+LXq9mKy176UDnbOmjEUY5S7m2OzQ4hTvEi3l+J2sNU5LFaQ1PXx54Y5vGwzZDETV3T25HwlsDGOHMXnboNvO68vMASvVSLOiMNxunMpHxO4K2nYu42rj9K361uc13hlaV0dINjkdtsxx5H7NOxPK71FUrhpozPY/WHDKWzgsjWr0dSatmnfLUkYyvFM+YwvcSNmtfzDlJ2DtxtvuvU6K6I8sZ3Rmel4K8TqcWCyL79riJ4/VrspyGWaD4Wqv7aNu27mcjXO5x05QTvsCq/wAbMbrHVcevKeSxGustmoMzDNhaWJilbhm42GaGVsnmERzylrZCWu55OflDWjYFex0Umm4yfhphLdXjfxcy8+Os1qmSfiPFbc9d0bbDWU9nBjnAc3K4kEDuO4OxWI5fhvqJjK+fsYLUljF4jiHqC9cx+FknqZGSpZfIyK1XMbmSPDeYHzDu5j3bbglexkVmm48y29EaLs6HymdbpPibFYuZeo4XpfHLOahmrseYLkcc0j5WsZ2j2dW7ncgsLdita4E5TVmY4cUrOs4Z4sx207GPt1m1rE1dsrhBLNC3pHI6MNLmjuJ7h3LQEViLDo8Pf9f1h/XA/wAnVVxVQ4fMItark72S5fdp2PXarXYf8WkfoVvXh2n92fhHyhZERFyoIiICIiAiIgIiICIiAiIgLr5DH1srSmqXIWWK0zeV8bxuCP8A/eldhFYmYm8Cnu4f2mHlg1dnIIh9zHy1JOUermfA5x/pJJ/OvnkBf9s838zR/dlcUXTrOLyyjo1eVO8gL/tnm/maP7snkBf9s838zR/dlcV+ZHiONzyCQ0EkNBJ/QB1Kazicso6F5VDyAv8Atnm/maP7ss1uZqzrDK610foDiFen13p2CJ0jctj4DQimeSRHI9lZpJ2HXlPTmB67OAm6+Ty3hEaOxOU09lNT8M6MGZ7SbxrHsht5KrEdwGCTcxxyO5TuR1Ac0tIK1uKrDXlnkihjjkncHyvY0AyODQ0Fx9J5WtG59AA9Ca1icso6F5UbDcPM/FiqjMrrrJWsk2JoszU6dOGF8m3nFjHQvLW79wLifzru+QF/2zzfzNH92VxRNZxOWUdC8qd5AX/bPN/M0f3ZPIC/7Z5v5mj+7K4oms4nLKOheVO8gL/tnm/maP7suvkeHuakoWWUdcZWvddG4QS2KlOWNj9vNLmCBpcAdtwHDf1jvV5RNZxOWUdC8sAr5q/oGXRWnOJfEO1X1nqWaatWdhcdCKEsrXjkY1z67i0lr2bcxG7ubbZad5AX/bPN/M0f3ZWyerDZdE6aGOV0L+0jL2gljtiOYb9x2JG49ZWUZC9mOAGmdWajzeV1JxHxc2TFuvQrUY5beNryOHaNHLymSNhLndw5WtAA6EprOJyyjoXla/IC/wC2eb+Zo/uyeQF/2zzfzNH92VsqWW3KsNhjZGMlY17WyxujeARvs5rgC0+sEAj0rlTWcTllHQvKneQF/wBs838zR/dl+maBt77S6uzczD3sLKjN+vrbACP0FW9E1nE5ZR0Ly6mLxdXC0IaVKEQVohs1gJPedyST1JJJJJ3JJJJJK7aIuaZmZvLIiIoCIiAiIgIiICIiAiIgIiICIiAiKl8Y9Q6o0pw4y+W0ZhW6i1JW7F1XFvBIsAzMEjehBB7MvIO/QjfY9xDu8QNYWtJ6TzmRw2Gn1XmsdXbPHgqErRYnLjs0de4HZx32JIY7YOI2NdxXDp+rNX6S4i6idlsRqGhiex8m48kX0KliVp7Zxaw8sjwHFnNvykNadtwCJnR/DDTmktR5/U+OxQq6g1G+OfJ2nyvkke5rQAwFxPK0dTyt2G57u7a4ICIiAiIgIiICIiAiIgz/ACfDZmI4g5biNibGXu5yXDOpHA/CJZRuvZ50J5Heax4PM0O6NHaOJG5JMjw01rkdX6Ow2R1FgJ9HZ662QS4O9Mx0rHscWu5SPumnbmB2B5SCQFb1U9Y8LdM69zWm8vmsaLOU07b8dxltkr4pK8nTfZzSN2nYbtO4Ow3HRBbEVH4M6k1Xq3QVXJ61wTdN5+WxYbJjWgjso2zPbEepO5LA07+nfuCvCAiIgIiICIiAiIgIiICIiAiIgIiICIiAqRxqxmZzPDDOU9P6nh0ZmJWRivnLDg1lUiVhJJPraC3/AJld1mHhMeR3xHao+MDxzyQ7OHx/xDftuXt4+Tl26/d8n6N0GlVWubWhD3iV4YA54++O3euVcFHs/Eq/Y79j2beTfv5dun+C50BERAREQERE
BERAREQEReEf4Ufgi/P6TxHEzHRF9rCAY7JbdSar3kxP/oZK9w//AHvzIPWXBDFZvC8OqNTUOq4da5Vs1gyZmu4OZK0zPLWgj8BpDP8AlV8X8dv4P7gvPxW4+4rKStkZhtKSR5izMzoDMx4NePf1ukaHbelsb1/YlAREQEREBERAREQEREBERAREQEREBERAVI41ZPM4bhhnLmn9MQ6zzETIzXwdhocy0TKwEEH1NJd/yq7qkcasZmczwwzlPT+p4dGZiVkYr5yw4NZVIlYSST62gt/5kFyquc6tCXsETywFzB96du5cq4qrXNrQh7xK8MAc8ffHbvXKgIiIK3mtV2K199DE48ZO3CAZ3Sz9hBDuAQ1z+VxLiDvytadhsTtu3eO8qNW+zmH+mpf3VdXTp5srqgnv+Fngn0naKID/AAAH6FOL6uhh4dqZoifDfu5TDWyEb5Uat9nMP9NS/uqeVGrfZzD/AE1L+6qSXTOZx7cu3Em9WGUdAbTaJmb25hDg0yBm/NyBxA5tttyAnsvLjOrqX5OHyo1b7OYf6al/dU8qNW+zmH+mpf3Vc2IzOP1Bjochi71bJUJwTFapzNlikAJB5XNJB6gjofQu4nsvLjOrqX5I3yo1b7OYf6al/dU8qNW+zmH+mpf3VSSJbC8uP7dS/JG+VGrfZzD/AE1L+6qL1Q/O6y05k8FltKYW3jMlWkqWYXZuXz43tLXD/Veh2Pf6FZkS2F5cf26l+TDfBi4LZjwZ9CWMDRxeIzF65bfauZN+TkhdMe6NvJ4u7ZrWgDbmPUuPTm2GweVGrfZzD/TUv7qpJEtheXH9upfkjfKjVvs5h/pqX91Tyo1b7OYf6al/dVJIlsLy4/t1L8kb5Uat9nMP9NS/uqeVGrfZzD/TUv7qpJEtheXH9upfkjfKjVvs5h/pqX91Tyo1b7OYf6al/dV2clkqeGx9i/kLUFGjWjMs9mzII4omAblznOIDQB3krnilZPEyWJ7ZI3tDmvYdw4HuIPpCey8uM6upfkj/ACo1b7OYf6al/dV+ma1y+P8A47NYOvVoN/0tihedaMQ/CcwxMPKPSRvsOuykF08y0Ow94OAcDBICCNwfNKsU4VU20IznqXjcZfi/orCaQy2qbGp8bJp/EyCG9fqTizHXkJYAx3Z8xDt5Gebtv5w9aiMtx0wVE6Dkx+OzmoqWszG7HXcNjnzwxRP7LaawTsYWATNcS4bgB3TzSF2+FOgNNaa4fY+ti8BjqFfIwQ3bkUFZjW2J3MYTJINvOduB1PqHqV6a0MaGtAa0DYADYAL5ldOjVNO5mVKxms9TX+JWd0/NomzS07QqiWrqeW7GYbsxEZ7JsI88bc793HpvGR6QqjW4z6j0HpzT9jinpmPEZXO6hiwVZmn5hcrw9q0dlLM9xBa0uEgJAO2zfWtkUFro51ujc2/SzKsmpY6cz8Yy43eF1kMPZtd1bsC7Yb7jbdYE6ih9HTZmxpPDSairw1c+6nCchDXfzxMscg7QMPpbzb7fmUwgIiICIiAsw8JjyO+I7VHxgeOeSHZw+P8AiG/bcvbx8nLt1+75P0brT1SONWTzOG4YZy5p/TEOs8xEyM18HYaHMtEysBBB9TSXf8qC4Uez8Sr9jv2PZt5N+/l26f4LnXFVc51aEvYInlgLmD707dy5UBERBQNOf+6ao/raT9VEs0zGc1jxE4v6l0lp3U/kZi9MUqctm1BQhtWbliyJHtH8cHNbG1sfXZvMST1Gy0vTn/umqP62k/VRKsa04KY3Vuqm6lp5zPaUzrqwpWLun7bIXW4GklrJWvY9ruUuds7YOG52K+ti+OXyWfFSZ8pxC1xr3V2msNrSPTbdHUqML7TMXBMcpdmr9s6SUSBwjhA5Ryx7Hcu87oFC8INey8UOMmhdV2K7atnK8N5bE0Me/K2Tx+AP5d+vLzA7b+jZX/UXg8YjO2zag1JqfCW7GOhxeRsYzIhkmUgiaWs8Zc9ji54DnDtG8r/OPnKYg4Ladx2f0llsSbmEm01Rdi6sNCflimpkN/iJmuB52Asa4dQeYb7rwtKML4f6su4DwZOGGPw+oMnh87lJbEVWrhMVDkLt0Nkmc9kbJj2bA0bOdI/zQBt0Lguzj+L2vtQ8PtI1nZiTBail1/JpTIZB2PrmaSBjLB3dD58bJCGR78hIDmnYlp2Ok1/Bm0/jcbjamKz2osQ/FZCzexVqpcjMuPbYbtNWi543DsXd/K8OIJ6EdNqvrXwbJKOH0ziNKX86+u7W0WocjdkyMZtVAa0zJp45JBu4l5Y4g85Lnu2HLuBm1UQOhqDW/FLT8/EDRuKyUmq85h4cZkqeWjx8HjwpWJXtsN7FobDJMxsT3MHKA7fuJAB6eY43Z+TA6HwOktRZPV2X1DdyEdjL1sRUhyVRlRrXSQGrO6GFk4MjAecDZocQw7hajiuAOMw2GzletqbU7c3mrENi9qY5BvwnIYduyZziPkDGjcBgZy7OcCDuumfBl0v5PQUW5LOR5eHKy5uPUsd0NybbsjQySXtAzk85gDCzk5C0AcvRW0jvcD8try9UzlbW+PvQsq2WfBl/Jw1YLVuFzAXdrHWlkjDmPBG7SA4Fp5Qd1zcfMrrHDaIgs6MbZ8aGQgGQmx9Rlu5BR3PbSV4X+bJIPN2aQehdsCQF2YMHqjh7hYKWnGya5nmmkmtXNV550EzSQ0NDTHWkby9D5oawDbpvuV1reE1lxDpux+oIjoSOF7bEGS0nqJ09l0g3HI5slNjeQhxJB5gSB09K17rCoaS4n38trLhPQx2sPKrC5rH5uW9eNCOs+1JXfXEQfHygxPj7R7HNHLuQd29wFascUdc5bNQ4mlqMY51riRf04LPiMEpiox0nyNY1pbsXNc3cOO5325uZu7TosPg36eoYnA18Zl87icphrVu5Bna1pjrssto72TKZI3Mf2h2JBZt5rdttlyad8HTTumn42SDJZq1LR1FPqZsly0yV8tuWB0LxI4s3czZxO2/NzffbdFLVDIMxr/iZpfSPFDPya7ORHD/LitFWlxFVgycIZBM4WHNYCHcs/IDF2e3Lud99hNcRtccQI8lxwu4fWJw9HQsFe7j6DMZXmbPvj47Ekcr3tLiwuDtuXZwLz5xADRqOa4EYDO6a1/hLFzJMqa0tG5kHxyxiSJ5iij2hJYQ0bQt+6DupPXu27OW4MYTMxcRY5rV9o11A2vkuzkYOxa2qKwMO7DynkG/nc3nfm6JaRn2I19qfR+tKFTVOsIshhczpK3n5LVjHxQtxUsBhLyzswC6HlmJ5XlzvMHnHcqH4R8VtZz8TcVhsxkszm8Bn8LayNG9nMLVxry+F0RD4GQuLuyc2X7mZoePNO53K1rO8FdO6lu0Z8kbdmKrgrWnTVdI0RzVbAjEnPs3m59omgFpG256d20RprwesXpzUmCzsmp9TZnI4avLSqOyl2ORgrSM5DCWNiaNhsx3MAHksbzOcBslpGPxZniFqPwPMnr/Na6fZyVrTcl3xAYag+oQ0F2z2Phdzl7W7PB83zzytGwKl9QcT9fal13l9NaSiztK
lpuhQEsunsbjbJmsWK4mHai3NGGxhpaA2Ju5If5w2AWvV+C2ErcFTwwbayBwBxbsT4yZGeNdk5paXc3Jy82x7+Xb8y6Oo+AeJzWoGZzHZ/UOlMu6nHj7dvA3GQOvQxgiMTB0bmlzdzs9oa4bkA7bKaMjL9QcSOJuIuaRu61yE/DTBTYqL4QvUsXBfrMyfbuY+O28l/YROZ2Za4EAF5Bk6L0jlzviLpHd2D/8AxKz7XPAbHcQa8FLJan1RHhxRix9vFV8kBXvxMJP8dzMc4udvs57XNc4bAnotAyrQzDXGtADRXeAB6ByleuHExVF1jxSmhf8AYjT39XV/1TVOKD0L/sRp7+rq/wCqamP11pvLahs4CjqHFXc7WjM0+Mr3YpLMTA4NL3RB3M1oc5oJI23IHpXDjfuVfGSfFOIi62TtvoY23airSXJIYnyNrw7c8pAJDG7+k7bD+leSMy4G0tLady3EPTundSXc7bq6glvZKtc5nDHTWWtkFeN5aA5gA/CcQdwSD0WrKicGH2cpoanqHK6Pq6J1Jnd72VxteMCTtj5odK7la5zyxrN+Ybju3O26vaAiIgIiICpHGrGZnM8MM5T0/qeHRmYlZGK+csODWVSJWEkk+toLf+ZXdZh4THkd8R2qPjA8c8kOzh8f8Q37bl7ePk5duv3fJ+jdBpVVrm1oQ94leGAOePvjt3rlXBR7PxKv2O/Y9m3k37+Xbp/gudAREQUK61+kcxlZbFazNjshY8ajsVa75+zcWMY5j2saXDq3mDtttiQSNhvweXeJ9WR+i7X2a0RF3R2imYjTpvPxt9JavHvZ35d4n1ZH6LtfZp5d4n1ZH6LtfZrREV1jC4Jz/BsZ35d4n1ZH6LtfZp5d4n1ZH6LtfZrRETWMLgnP8Gxnfl3ifVkfou19mnl3ifVkfou19mtERNYwuCc/wbGd+XeJ9WR+i7X2aeXeJ9WR+i7X2a0RE1jC4Jz/AAbGXYbivpjUVBl7FXp8nSeXNbZp0bEsbi0kOAc2Mg7EEH84Xd8u8T6sj9F2vs10fBpyul8zwixlvRunrWlsA6xaEOMub9pG8WJBI47ud908OcOvcVqSaxhcE5/g2M78u8T6sj9F2vs08u8T6sj9F2vs1oiJrGFwTn+DYzvy7xPqyP0Xa+zTy7xPqyP0Xa+zWiImsYXBOf4NjO/LvE+rI/Rdr7NPLvE+rI/Rdr7NaIiaxhcE5/g2M78u8T6sj9F2vs1+LGofKCpNQw9O9YuWGOiY6alNBDFuNud8j2BoA3326k7dAStHRNYojbFM3+P4gvD+XvhwaL496Bs3LFzVGVzHDAu7Co7FSmCvWgJ2jgswx7dWjZokcCHdPO5jyiG/g4Is5p3UnEbXuHwcuqXYXCQY84WnKGW7L7NqNzTHzDl2aytK525B6NAB3O39VbtKvkac9S3BFaqzsMcsEzA9kjCNi1zT0II6EFZvwi8HXRnA3P6ryejqk+Mi1G6u+zju1560Doe02MII5mhxmeSC4gdA0NA2XDMzM3llz5PjdjdO+QMOcwmcxmQ1f2ccFUUXTeIzP7ICKy5m4iIdKBuenmv/AASoDiNq1nFLP5rhTpHVt/Ses8aauQvXYaMo5KrXwyOZFN0ZzObLGOhd0LgQRvtsyKAiIgIiICIiAqRxqyeZw3DDOXNP6Yh1nmImRmvg7DQ5lomVgIIPqaS7/lV3VG43Y/LZThbnq2C1TBonKvjjMOesvDI6m0rC5ziegBaHN/5kF0quc6tCXsETywFzB96du5cq62OmbYx9WVlhlpj4mubPG4ObICBs4EdCD37/AJ12UBERAREQEREBERAREQERfiaaOvE+WV7YomNLnvedmtA6kk+gIKdwgta2uaEpy8QqdKhqkyzieDHkGEMErhERs5w3MfIT17ye5XRZr4O+LoYfhVjauN1q/iFTbPZczPvl7QzkzvJbzczt+Qks7/vfQtKQEREBERAREQEREBERAREQEREBERAREQF0M9gcbqjD3MTl6NfJ4y5GYrFS1GJI5WHvDmnoV30QZhW0fqfh3mtB4DQNHB1OGlCGWpk6Fl0vjULduaOSJ+55jzAgh3UmQk797bboniJpriRjrF7TGbp5urWsSVJpKknN2crDs5rh3g9Nxv3ggjcEFWJUHX2gMzY09aj4d5ejobPWMizJWLjcbHNFdeNg9s7OhdzgNBeDzeaOqC/IqVheLeBzPE3NcP2Pts1NiKkV2eOanJHDLC8N/jInkcrmguDT179wN9jtA8LPCV0Jxm1rqrTOlMm7I29PFna2Whvi9tp3Dn13hxMjGuHKXbAEkFpc1wcQ1NERAREQEREBFlvGPwkdGcC85pPFaoszQ2dSXBUrui7MR1m8zWusWHve0RwtLxu7qdgdgdjtO664p0dC6k0ngpcXl8rkNSXDVrtxlJ0zIGt2Mk0zx0Yxgc0nrvsSQCASAkOInEXT3CnSV3Uup8gzGYioBzzOaXOc4nZrGtaCXOJ6AAKFZQ1VqnXDrc2Qw8/C63hxG3EyUXut25pfunSl+wawM2Abt1Ejg5u4BXJoXQGcwl/VVjVGq59YQ5bJeN0aVqrHHBjYWH+KijaB1I2YS7pu5ocACXF17QRemdMYnRmCp4XBY6vicTTZ2denUjDI42/mA9JO5J7ySSepUoiICIiAiIgIiICIiAiIgIiICIiAi62SutxuOtW3NLmwRPlLR6Q0E7f4LO8fpahqXHVMnnK7crkLULJpH2CXsYXNB5Y2k7MYN9gAB6zuSSenCwYxImqqbRn9YW29pqLOfi50x8hUfmQnxc6Y+QqPzIXvq+FxzlH3LsaMizn4udMfIVH5kJ8XOmPkKj8yE1fC45yj7jY0ZFnPxc6Y+QqPzIT4udMfIVH5kJq+FxzlH3GxlHh88R9b6F4VQ0NA4bLWMtnHyVrmZxlB8/wfTa3+M/jWHeKR5ewMcQfNEpBa5rSv5l+Dxxcv+D7xkweqeynbBWl7DI1Ni101V/SRux23O3nN36czWn0L+xfxc6Y+QqPzIXHNww0lYG0uncdKPU+u0pq+FxzlH3GxoGMyVXNY2pkKM7LVK3CyeCeI7tkjc0Oa4H0ggg/pXaWcM4baWjY1jMBQaxo2DWwgAD1L78XOmPkKj8yE1fC45yj7jY0ZFnPxc6Y+QqPzIT4udMfIVH5kJq+FxzlH3Gxoy4rVmGlWlsWJWQQQsMkksjg1rGgbkknuAHpWffFzpj5Co/MhPi50x8hUfmQmr4XHOUfcbH8hfCk4zW/CF425jPwCWfGNf4hh4GsJIqxuIZs3bfd5LpCPQXkepf0J/g4tVazucJruk9WaZyWIr6cfG3GZTIRTx+OwzGR5jAkGxMXKBuw7cskY5QRu/bIeF+ka42i05joh/uV2j/suX4udMfIVH5kJq+FxzlH3GxoyLOfi50x8hUfmQnxc6Y+QqPzITV8LjnKPuNjRkWc/Fzpj5Co/MhPi50x8hUfmQmr4XHOUfcbGjIs5+LnTHyFR+ZCfFzpj5Co/MhNXwuOco+42NGRZvNo3HYmtLZwtduIvxNL4Zqu7BzDrs5
o6OadtiCD0/wCqvGncr8O6fxmS5QzxyrFY5R3DnYHbf4rxxcGKI0qZvHwt9ZS25IIiLlQREQEREBERAREQReqv9mMx/wDDm/8AAqvaZ/2cxX/xIv8AwCsOqv8AZjMf/Dm/8Cq9pn/ZzFf/ABIv/AL6OD+zPx+jXuUuHwhdA2tXV9M1s463mLFx1CGOvSsPiknZv2jGzCPs3Fmx5tnHl2PNtsuZ/HzQMeqvJ52oYhkvGxQ5uwm8W8Z327Dxjk7HtN+nJz82/TbfovOPD8TYbUGh9G6sGSwGmNNapnmwdi7p25DLesvknZWiltFpgG5ncd2OPaeb3EldvhdwupY7DY/h9rjTHEa9lq+RdHPPVyF84Ky3xgyx292zCBrfuXluwcHA+aSsRVMst9t+ENw+oZmxi59QCO3WvfBtk+J2DDWs8/II5pRHyREuIAL3AO9BK7es+OOiOH+XOLzmcFW8yITzRxVZrArRnfZ8zo2OELTsdjIWjYbrE9VaMz1jgNx8oRYLIy5DJanvWaFVlSQy2mF1cskiaBu8HlOzm7jzTt3Lg1Bo92l+KXESTU2n+IWao6htx38ba0bduivYjNdkTq07K8rGMe0sIDpdgWkecANk0pG5Z3jnonTucgw1rMumylinFkIKlClYuPmrSuc1krBDG/mbux25G+w2J2BBPwcdtDeWrdJuznZZx1k0mxS1J2RPsDfeJs7mCJz+h80O3/MqjoHQDNIcfJxjsNbp6do6GxuKo2JmPexgjs2CYBK7fmc1vZkjmJ25SfQsh1vR1hqHJCzm8RrvJ6jxWtK1/sKkE3wNWxkN5ro3wMYRHO7sQ09A+UOLtwACrNUwN40Hx6xeuOJOrtHspXatrCXvE4ZnUbPZ2A2Fr5HOkMQjj2c5zWtc7zg0ObuHBSOE496C1HqSHBY/UMU9+eV8FdxgmZXsyN35mQzuYIpXDY9GOceh9SoNChl8bxL4u6alxGYrHWL2WMTnq9KSSiwHHMhJkmaCI3NkiI2dsTu3bfdVDgnoPFSQaG03qXSHEatqHT7oHym/kL0mErWqrN2TRudN2Do3OZ5jYwducDlA3S8j1avP2nvCWl1VqTXlmtJXx+k9LCSFwtYLIvtzPbHGe1LmsDWtD5ADEGOk5Wl3mggr0CsS0Hp7KU8BxyjsY25BJkdRZGekySB7TajdRrta+MEee0ua4At3BII9C1NxM43wgtL43TemJNRZuvJm8tha+YbDhsfcmbYikbuZYIhG6Xk33OzhzNGxcApjNcc9EYHTWGz9nN9ticywyUJ6NSe2Z2gAkhkTHOAG433A29Oyy/gZpXNYjW3Dmxfw9+lDU4WUsdYlsVXxthtNlhLoHkgcsgAJLD5w2PRVDTFDV2m9BcPcRk8drLG6TE2bdkq+mKs7Mh25vyOqMk7MCaKF0bnuDmbA+buQ0hY0pG25rjFFPl+Fz9M2KOWwOr8hNXfdAc49kypNMDGQ4crueIA8wO3nDYHu05eQ9CaY1HpTRPCy1Z0pqD/6Y1nlHX6Dq7prkVez42I5gAT2rB4xGXPYXDq47nYr14tUzfxFKocZdH5XW82kqeXNnOwzSV5IYqsxibKxhe+LtuTsudrQSWc2427lzVOLek72mdN6hgyvPh9RWYaeLs+LSjxiWUkRt5SzmbuWnq4ADbqQskxYy+n+PXi+isLqrH4jKZezLqenlseW4hw7N3/rqtg90j3tZ5jHEO5iS1pG6pOm6moKvDbgzoOXRupGZfTGp8f8K2XYyQVIYoZZAZWzbcsjCCHBzNwB90W+maUjb7nhOcNMfYfFZ1M2AR25aEk76VkQR2Y3Oa+F8vZ8jZN2O2YXAuGxaCHAmRh49aEl01l88/PCrjMPYhq5F9ypPXkqSSvYyPtYpGNkYHGRuzi3l2JO+wJGMN0ZnviegpHBZHxwcTvhA1/E5O08W+GjJ2/Ltv2fZ+fz93L132X3jJo3PZTN8aX0sFkbcWRZpLxR0FSR4smG6503Z7Dz+RuxdtvyjbfYKaUjUZPCh4axOuMfnrLLFNoksVnYi6J4ott+2MXY84i269rtydR53UKY1bx10PoerjLOWzfJXyVbx2rNUqT2mSQbA9qTCx4azYg8zth171WrWn8hJ4QGtMh8G2XY6zoupUitdg4wyzCxbLomu22c4BzSWg77OHTqFkWAx+rsfpDhzgc9i9cwabh0ZWijx+mYZoJ35QbtfDbezlfC1rOz5Q9zI9y7mPTZW8j0PqTjZorSYwXwjm2752s+3ixUrzWjdiaIyTEImOLztKwho6kHcAgHbsUeLuk8hpnP6gjyhjxWA7QZOSzVmgfVLImyuDo3sD9+R7XDZp336bnosL4M6PzlPIeD8clgMlUfgdN5ijedbpvaKc4NaNrXOI2bzBj+Q7+e3ct3C5uNWjcha464nTVCNr9P8SWV351u/VgxkjZZHf0TQujhP/CE0ptcelJZ2WcY+aMkxyQl7SWlp2Ldx0PUfpXb4c/ye6X/AKrq/qmrguf6nP8A/pu/7Ln4c/ye6X/qur+qatYv7P8AMfKWvcsSIi+cyIiICIiAiIgIiII3UsbptOZWNgLnuqStAHpJYVW9LvEmmcQ5p3a6nCQR6RyBXZVCfQU1eVww+bs4mo4lwpiGKWKMnv5OZu7Rv97vsPQAOi7cDEpimaKpt7/9ZqPCyi43weeH2J1NHnq+nx8IxWjdi7W5YlgisEl3asgfIYmP3JPM1oIPULRlH+RWc9rJvcIfqTyKzntZN7hD9S947qPCuMp6FuaQRR/kVnPayb3CH6k8is57WTe4Q/Ul8LzI9ehbmkEUf5FZz2sm9wh+pPIrOe1k3uEP1JfC8yPXoW5pBcVqrDerTVrETJ68zDHJFI3dr2kbEEekELqeRWc9rJvcIfqTyKzntZN7hD9SXwvMj16FuamDwduFrSCOHmmQR1BGKh/ZWhqP8is57WTe4Q/UnkVnPayb3CH6k9lH/cZT0S0b0gip/EPHah0ZoDU2oINTPsT4nGWb8cMlGINe6KJzw07DfYluy6PCIaj4jcLdJ6ptakdUs5nGV78kENGIsjdJGHFrSRvsN/Sl8LzI9ei25r8s9f4PHC+R7nv4e6Zc5x3LjioSSf7quXkVnPayb3CH6k8is57WTe4Q/Unsp/7jKeiWje7VOnBj6kFWrCyvWgY2KKGJoa1jGjYNAHcAABsuZR/kVnPayb3CH6k8is57WTe4Q/Ul8LzI9ei25pBFH+RWc9rJvcIfqTyKzntZN7hD9SXwvMj16FuaQRR/kVnPayb3CH6k8is57WTe4Q/Ul8LzI9ehbmkFXqHD/T+N1jktVwY1g1FkImwWMhI98j+zaGgMZzEiNvmNJawAEjc7nqpHyKzntZN7hD9SeRWc9rJvcIfqS+FxxlPRLRvdnIPbHQsvcQ1rYnEk+gbFdrh9E6HQWmo3gtezGVmuB9BETVHx6Bnt7xZjOWcpSd0kp9hFFHMPwZOVu5b62ggEEg7gkK4LxxsSnQ0KZvtv/rr7rCIi4WRERAREQEREBERAREQEREBERAREQEREBERBRuOv8iPEL
+zuR/y0iiPBd/m4cMv7O0f1LVL8df5EeIX9ncj/AJaRRHgu/wA3Dhl/Z2j+pag1BERAREQEREBERAREQEREBERAREQEREBERAREQEREBERAREQEREBERAREQUbjr/IjxC/s7kf8tIojwXf5uHDL+ztH9S1S/HX+RHiF/Z3I/wCWkUR4Lv8ANw4Zf2do/qWoNQREQEREBERAREQEREBERAREQEREBERAREQEREBERAREQEREBERARdLMZqhgKEl3JW4aVSP7qWZ4aNz3AesnuAHUnuWd3uPNBspbjsLkL0YOwnl5K7XfnAcef/q0LrweyY/aP2qZn5ZrZqKLIfj7sey8vvzP2U+Pux7Ly+/M/ZXX+lds4PWOpZ4g/hQ+CcunuIGP4l0mPfj9QNZTvk9RFbijDY+voD4mDYD0xPPpUf8AwYHCO7qbixc13K6SHD6aicyLYkNntzRPiDdu4hsT5SfSC5nrXrXjfqCpxx4YZzRuU05LWiyEQ7G220x7q0zSHRygbDflcBuNxuNxuN11PB/yNTwf+F2L0djdPSXjWL5rV82GROtzvO7pC3Y7dOVoG52a1o3OyfpXbOD1jqWeo0WQ/H3Y9l5ffmfsp8fdj2Xl9+Z+yn6V2zg9Y6lmvIsto8eab5Wtv4PIU4ydjLC5k7W/nIBDtv6AVoeEz2P1Jj2XcZbiu1XHYSRO32PpaR3gj0g7ELkxuyY/Z9uLTMR6ZlnfREXIgiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiIC4rVqKlWmsTyNighYZJJHHYNaBuSf6AuVUfjVZfW4a5UMO3bvr1X/nZLPHG8fpa8j9K98DD77Fow+KYjOVjbLI9Saqta3yYyNnnjrN3NOo8bdgw+kj8Nw6k+jfYdB1jURfpuHh04VMUURaIYmbiIss4r62z1HVeD0vp2O82zdrT3rFjGwV5rDY43MaGsbYe2PqX9SdyABsOpImJiRhU6Uo1NFhp1XxDZW0zjcjNLgruQ1BJj23LVSs6axT8Vkka90bHvYyQOaR5p23YCQQS0/bfEfVGChzmmvhGLI52PUVPB0cxarMaGMswslEkkbA1rnMaXjoACeXp378+t0+MxMdbXsraGZKnJkJKDbUDr0cbZn1RIDK1jiQ15bvuGktcAe47H1LsrIdBYrKYfjlqSvls3Jn7PwBRc23LWjgdy9vP5pbGA07Hc77DoQPRudeXvhVziUzMxbbKC7uB1Hd0dlRlKHPJsNrNNp821GPvSO7nH3ru8Hp3FwPSRbropxKZori8SsTZ6ex9+vlaFa7VkE1axG2WKQffNcNwf+hXYVC4IWXTcP68Tvua1qzAz/hEz+UfoBA/Qr6vzLtGF3ONXh7pmG58RERc6CIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAoHXennaq0hlMXGWieeHeAu7hK0h0ZP5g9rVPIt0Vzh1xXT4xtHlaCUzRNc6N8T+50cg2cxw6FpHoIO4P9Cr2byWrK2QfHicBi8hSAHLPay767yduoLBXeB1/3v+i9AcQ+FkmXsy5bBdlHkJPOsVZXFsdg7AcwOx5X7D1bO6b7fdLJb1a9iZTFkcVkKEgOxE1Z5b+h7QWO/Q4r9E7P2zC7ZRE0VWn3xsv6+7mW3KWc1r3ptpPB/n31BL+6LrZXQ0/ECGhez0T9MZ/GzPNK7gMkZZYmOaA4c74Wgh3cWFhHmg7+q4fCtb8J/wA076k+Fa34T/mnfUuucLSi1czMfx0TRncrrOG1MxaeFnJ5TITYS6+/DYuWBJLLI5kjCJCW9W7SO2DeXbYbdBsurmuEGDz3lEbUl3tM1ar3nyxTBj6s8EbGRSQOA3YQGA7nfrv6DsrZ8K1vwn/NO+pPhWt+E/5p31Kzg0TFpj/Wt8jRncpFDh7d0TkredxFq7qvOXIIaU3lBkmwt7FjnuBDo4HbHd+23Lse/od95EZnXmzt9KYMHbptqCXqfdP6VZvhWt+E/wCad9SfCtb8J/zTvqUjC0dlEzEfx9YNGdyEw+U1fYyMMeT09iaNE79pYrZmSeRnQ7bMNZgO52H3Q23367bKx2Jm14HyuDnBg35WDdx/MB6SfQFyUYbeWlbHj8bfvSE7bQVXlo/pcQGgfnJC1Th9woloXIMvqBsZtwnnrUI3c7IXeh73dznj0Aea09fOPKW8vaO14XY6JnEqvO7Zf0+a6O9beHWnpdL6NxtCwALYa6awBt0lkcXvHT1FxH6FZERfneJXOLXNdXjM3PEREXmCIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiD/2Q==", + "text/plain": [ + "" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from IPython.display import Image\n", + "\n", + "Image(app.get_graph().draw_mermaid_png())" + ] + }, + { + "cell_type": "markdown", + "id": "74f3e276-f003-4112-ba14-c6952076c4f8", + "metadata": {}, + "source": [ + "## Invoke graph\n", + "\n", + "When running the application, we can stream the graph to observe its sequence of steps. Below, we will simply print out the name of the step.\n", + "\n", + "Note that because we have a loop in the graph, it can be helpful to specify a [recursion_limit](https://langchain-ai.github.io/langgraph/reference/errors/#langgraph.errors.GraphRecursionError) on its execution. This will raise a specific error when the specified limit is exceeded." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "0701bb7d-fbc6-497e-a577-25d56e6e43c6", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['generate_summary']\n", + "['generate_summary']\n", + "['generate_summary']\n", + "['generate_summary']\n", + "['generate_summary']\n", + "['generate_summary']\n", + "['generate_summary']\n", + "['generate_summary']\n", + "['generate_summary']\n", + "['generate_summary']\n", + "['generate_summary']\n", + "['generate_summary']\n", + "['generate_summary']\n", + "['generate_summary']\n", + "['collect_summaries']\n", + "['collapse_summaries']\n", + "['collapse_summaries']\n", + "['generate_final_summary']\n" + ] + } + ], + "source": [ + "async for step in app.astream(\n", + " {\"contents\": [doc.page_content for doc in split_docs]},\n", + " {\"recursion_limit\": 10},\n", + "):\n", + " print(list(step.keys()))" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "0dc27458-7b37-4a2b-9452-b59274a55828", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{'generate_final_summary': {'final_summary': 'The consolidated summary of the main themes from the provided documents highlights the advancements and applications of large language models (LLMs) in artificial intelligence, particularly in autonomous agents and software development. Key themes include:\\n\\n1. **Integration of LLMs**: LLMs play a crucial role in enabling autonomous agents to perform complex tasks through advanced reasoning and decision-making techniques, such as Chain of Thought (CoT) and Tree of Thoughts.\\n\\n2. **Memory Management**: The categorization of memory into sensory, short-term, and long-term types parallels machine learning concepts, with short-term memory facilitating in-context learning and long-term memory enhanced by external storage solutions.\\n\\n3. **Tool Use and APIs**: Autonomous agents utilize external APIs to expand their capabilities, demonstrating adaptability and improved problem-solving skills.\\n\\n4. **Search Algorithms**: Various approximate nearest neighbor search algorithms, including Locality-Sensitive Hashing (LSH) and FAISS, are discussed for enhancing search efficiency in high-dimensional spaces.\\n\\n5. **Neuro-Symbolic Architectures**: The integration of neuro-symbolic systems, such as the MRKL framework, combines expert modules with LLMs to improve problem-solving, particularly in complex tasks.\\n\\n6. **Challenges and Innovations**: The documents address challenges like hallucination and inefficient planning in LLMs, alongside innovative methods such as Chain of Hindsight (CoH) and Algorithm Distillation (AD) for performance enhancement.\\n\\n7. **Software Development Practices**: The use of LLMs in software development is explored, particularly in creating structured applications like a Super Mario game using the model-view-controller (MVC) architecture, emphasizing task management, component organization, and documentation.\\n\\n8. 
**Limitations of LLMs**: Constraints such as finite context length and challenges in long-term planning are acknowledged, along with concerns regarding the reliability of natural language as an interface.\\n\\nOverall, the integration of LLMs and neuro-symbolic architectures signifies a significant evolution in AI, with ongoing research focused on enhancing planning, memory management, and problem-solving capabilities across various applications.'}}\n" + ] + } + ], + "source": [ + "print(step)" + ] + }, + { + "cell_type": "markdown", + "id": "f15c225a-db1d-48cf-b135-f588e7d615e6", + "metadata": {}, + "source": [ + "## Next steps\n", + "\n", + "Check out the [LangGraph documentation](https://langchain-ai.github.io/langgraph/) for detail on building with LangGraph, including [this guide](https://langchain-ai.github.io/langgraph/how-tos/map-reduce/) on the details of map-reduce in LangGraph.\n", + "\n", + "See the summarization [how-to guides](/docs/how_to/#summarization) for additional summarization strategies, including those designed for larger volumes of text.\n", + "\n", + "See also [this tutorial](/docs/tutorials/summarization) for more detail on summarization." + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.4" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/docs/how_to/summarize_refine.ipynb b/docs/docs/how_to/summarize_refine.ipynb new file mode 100644 index 0000000000000..4364785217e04 --- /dev/null +++ b/docs/docs/how_to/summarize_refine.ipynb @@ -0,0 +1,333 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "c47f5b2f-e14c-43e7-a0ab-d71562636624", + "metadata": {}, + "source": [ + "---\n", + "sidebar_position: 3\n", + "keywords: [summarize, summarization, refine]\n", + "---" + ] + }, + { + "cell_type": "markdown", + "id": "682a4f53-27db-43ef-a909-dd9ded76051b", + "metadata": {}, + "source": [ + "# How to summarize text through iterative refinement\n", + "\n", + "LLMs can summarize and otherwise distill desired information from text, including large volumes of text. In many cases, especially when the amount of text is large compared to the size of the model's context window, it can be helpful (or necessary) to break up the summarization task into smaller components.\n", + "\n", + "Iterative refinement represents one strategy for summarizing long texts. The strategy is as follows:\n", + "\n", + "- Split a text into smaller documents;\n", + "- Summarize the first document;\n", + "- Refine or update the result based on the next document;\n", + "- Repeat through the sequence of documents until finished.\n", + "\n", + "Note that this strategy is not parallelized. 
It is especially effective when understanding of a sub-document depends on prior context-- for instance, when summarizing a novel or body of text with an inherent sequence.\n", + "\n", + "[LangGraph](https://langchain-ai.github.io/langgraph/), built on top of `langchain-core`, is well-suited to this problem:\n", + "\n", + "- LangGraph allows for individual steps (such as successive summarizations) to be streamed, allowing for greater control of execution;\n", + "- LangGraph's [checkpointing](https://langchain-ai.github.io/langgraph/how-tos/persistence/) supports error recovery, extending with human-in-the-loop workflows, and easier incorporation into conversational applications.\n", + "- Because it is assembled from modular components, it is also simple to extend or modify (e.g., to incorporate [tool calling](/docs/concepts/#functiontool-calling) or other behavior).\n", + "\n", + "Below, we demonstrate how to summarize text via iterative refinement." + ] + }, + { + "cell_type": "markdown", + "id": "4aa52e84-d1b5-4b33-b4c4-541156686ef3", + "metadata": {}, + "source": [ + "## Load chat model\n", + "\n", + "Let's first load a chat model:\n", + "```{=mdx}\n", + "import ChatModelTabs from \"@theme/ChatModelTabs\";\n", + "\n", + "\n", + "```" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "e5f426fc-cea6-4351-8931-1e422d3c8b69", + "metadata": {}, + "outputs": [], + "source": [ + "# | output: false\n", + "# | echo: false\n", + "\n", + "from langchain_openai import ChatOpenAI\n", + "\n", + "llm = ChatOpenAI(model=\"gpt-4o-mini\", temperature=0)" + ] + }, + { + "cell_type": "markdown", + "id": "b137fe82-0a53-4910-b53e-b87a297f329d", + "metadata": {}, + "source": [ + "## Load documents" + ] + }, + { + "cell_type": "markdown", + "id": "a81dc91d-ae72-4996-b809-d4a9050e815e", + "metadata": {}, + "source": [ + "Next, we need some documents to summarize. Below, we generate some toy documents for illustrative purposes. See the document loader [how-to guides](/docs/how_to/#document-loaders) and [integration pages](/docs/integrations/document_loaders/) for additional sources of data. The [summarization tutorial](/docs/tutorials/summarization) also includes an example summarizing a blog post." 
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "id": "27c8fed0-b2d7-4549-a086-f5ee657efc41",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from langchain_core.documents import Document\n",
+    "\n",
+    "documents = [\n",
+    "    Document(page_content=\"Apples are red\", metadata={\"title\": \"apple_book\"}),\n",
+    "    Document(page_content=\"Blueberries are blue\", metadata={\"title\": \"blueberry_book\"}),\n",
+    "    Document(page_content=\"Bananas are yellow\", metadata={\"title\": \"banana_book\"}),\n",
+    "]"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "84216044-6f1e-4b90-b4fa-29ec305abf51",
+   "metadata": {},
+   "source": [
+    "## Create graph\n",
+    "\n",
+    "Below we show a LangGraph implementation of this process:\n",
+    "\n",
+    "- We generate a simple chain for the initial summary that plucks out the first document, formats it into a prompt and runs inference with our LLM.\n",
+    "- We generate a second `refine_summary_chain` that operates on each successive document, refining the initial summary.\n",
+    "\n",
+    "We will need to install `langgraph`:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "bf7acdb7-19ca-43ba-98f4-91f5b804da21",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "pip install -qU langgraph"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "id": "669afa40-2708-4fa1-841e-c74a67bd9175",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import operator\n",
+    "from typing import List, Literal, TypedDict\n",
+    "\n",
+    "from langchain_core.output_parsers import StrOutputParser\n",
+    "from langchain_core.prompts import ChatPromptTemplate\n",
+    "from langchain_core.runnables import RunnableConfig\n",
+    "from langgraph.constants import Send\n",
+    "from langgraph.graph import END, START, StateGraph\n",
+    "\n",
+    "# Initial summary\n",
+    "summarize_prompt = ChatPromptTemplate(\n",
+    "    [\n",
+    "        (\"human\", \"Write a concise summary of the following: {context}\"),\n",
+    "    ]\n",
+    ")\n",
+    "initial_summary_chain = summarize_prompt | llm | StrOutputParser()\n",
+    "\n",
+    "# Refining the summary with new docs\n",
+    "refine_template = \"\"\"\n",
+    "Produce a final summary.\n",
+    "\n",
+    "Existing summary up to this point:\n",
+    "{existing_answer}\n",
+    "\n",
+    "New context:\n",
+    "------------\n",
+    "{context}\n",
+    "------------\n",
+    "\n",
+    "Given the new context, refine the original summary.\n",
+    "\"\"\"\n",
+    "refine_prompt = ChatPromptTemplate([(\"human\", refine_template)])\n",
+    "\n",
+    "refine_summary_chain = refine_prompt | llm | StrOutputParser()\n",
+    "\n",
+    "\n",
+    "# We will define the state of the graph to hold the document\n",
+    "# contents and summary. 
We also include an index to keep track\n", + "# of our position in the sequence of documents.\n", + "class State(TypedDict):\n", + " contents: List[str]\n", + " index: int\n", + " summary: str\n", + "\n", + "\n", + "# We define functions for each node, including a node that generates\n", + "# the initial summary:\n", + "async def generate_initial_summary(state: State, config: RunnableConfig):\n", + " summary = await initial_summary_chain.ainvoke(\n", + " state[\"contents\"][0],\n", + " config,\n", + " )\n", + " return {\"summary\": summary, \"index\": 1}\n", + "\n", + "\n", + "# And a node that refines the summary based on the next document\n", + "async def refine_summary(state: State, config: RunnableConfig):\n", + " content = state[\"contents\"][state[\"index\"]]\n", + " summary = await refine_summary_chain.ainvoke(\n", + " {\"existing_answer\": state[\"summary\"], \"context\": content},\n", + " config,\n", + " )\n", + "\n", + " return {\"summary\": summary, \"index\": state[\"index\"] + 1}\n", + "\n", + "\n", + "# Here we implement logic to either exit the application or refine\n", + "# the summary.\n", + "def should_refine(state: State) -> Literal[\"refine_summary\", END]:\n", + " if state[\"index\"] >= len(state[\"contents\"]):\n", + " return END\n", + " else:\n", + " return \"refine_summary\"\n", + "\n", + "\n", + "graph = StateGraph(State)\n", + "graph.add_node(\"generate_initial_summary\", generate_initial_summary)\n", + "graph.add_node(\"refine_summary\", refine_summary)\n", + "\n", + "graph.add_edge(START, \"generate_initial_summary\")\n", + "graph.add_conditional_edges(\"generate_initial_summary\", should_refine)\n", + "graph.add_conditional_edges(\"refine_summary\", should_refine)\n", + "app = graph.compile()" + ] + }, + { + "cell_type": "markdown", + "id": "cdc11401-8640-4cf8-a713-4031df690cf7", + "metadata": {}, + "source": [ + "LangGraph allows the graph structure to be plotted to help visualize its function:" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "21711ff5-4e06-4843-9109-e7d89e679449", + "metadata": {}, + "outputs": [ + { + "data": { + "image/jpeg": 
"/9j/4AAQSkZJRgABAQAAAQABAAD/4gHYSUNDX1BST0ZJTEUAAQEAAAHIAAAAAAQwAABtbnRyUkdCIFhZWiAH4AABAAEAAAAAAABhY3NwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAA9tYAAQAAAADTLQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAlkZXNjAAAA8AAAACRyWFlaAAABFAAAABRnWFlaAAABKAAAABRiWFlaAAABPAAAABR3dHB0AAABUAAAABRyVFJDAAABZAAAAChnVFJDAAABZAAAAChiVFJDAAABZAAAAChjcHJ0AAABjAAAADxtbHVjAAAAAAAAAAEAAAAMZW5VUwAAAAgAAAAcAHMAUgBHAEJYWVogAAAAAAAAb6IAADj1AAADkFhZWiAAAAAAAABimQAAt4UAABjaWFlaIAAAAAAAACSgAAAPhAAAts9YWVogAAAAAAAA9tYAAQAAAADTLXBhcmEAAAAAAAQAAAACZmYAAPKnAAANWQAAE9AAAApbAAAAAAAAAABtbHVjAAAAAAAAAAEAAAAMZW5VUwAAACAAAAAcAEcAbwBvAGcAbABlACAASQBuAGMALgAgADIAMAAxADb/2wBDAAMCAgMCAgMDAwMEAwMEBQgFBQQEBQoHBwYIDAoMDAsKCwsNDhIQDQ4RDgsLEBYQERMUFRUVDA8XGBYUGBIUFRT/2wBDAQMEBAUEBQkFBQkUDQsNFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBT/wAARCAEvAQsDASIAAhEBAxEB/8QAHQABAAICAwEBAAAAAAAAAAAAAAYHBQgBAwQCCf/EAFYQAAEDBAADAggICAgMBQUAAAEAAgMEBQYRBxIhEzEIFBUWIkGU0xcyUVRVVmGVI1NxkpPR0tQzQlJ0dYKxsgkkJTY3OHJzgZGztDQ1Q2JjZIOho9X/xAAbAQEBAAMBAQEAAAAAAAAAAAAAAQIDBAYFB//EADQRAQABAgMFBgMIAwEAAAAAAAABAhEDElEEExQxkSFBUmGh0QUVUyMzcYGxweHwIjLxQv/aAAwDAQACEQMRAD8A/VNERAREQEREBERAXVUVUNJGZJ5o4Ix/HkcGj/mVhblcay518tqtEniz4eXxu4OjD20+xsMYD0dKQQdHYaCC4HYa7rpuHthjk7aqoGXWsI06ruf+Myn1nTn75Rv+K3QGhoDQW+KKaYviTbyhbavecqsoOjeKDf8AOWfrXHnVZPpig9qZ+tcnF7MTs2ig3/NmfqTzWsv0RQezM/Ur9j5+i9jjzqsn0xQe1M/WnnVZPpig9qZ+tc+a1l+iKD2Zn6k81rL9EUHszP1J9j5+h2OPOqyfTFB7Uz9aedVk+mKD2pn61z5rWX6IoPZmfqTzWsv0RQezM/Un2Pn6HY486rJ9MUHtTP1r10dzo7hvxWrgqddT2MjX/wBhXl81rL9EUHszP1Ly1mCY7XaMtkoRICC2WKBscjSO4te3TgftBT7Ge+fT+E7GeRRcvq8L5XVFVPcrDsNdLUHnqKLZ1zPf3yRfK47czW3FzSSyULXXRl7Ym8STAiItaCIiAiIgIiICIiAiIgIiICIiAiIgIiIC8V6ukdks9fcZgTDRwSVDwPW1jS4/2L2rEZfan37E73bI/wCErKGenbv5XxuaP7VnhxTNcRVyusc3XhlsktWN0UdQWurpWeMVcjd/hJ3+nI7r11zE6HqGh6lm1j8eujL3YbdcI9hlVTxzAOGiOZoOiPURvRHqWKyriZh+C1MNNkmV2PHqidnaRRXW4w0z5G71zND3Akb6bCuJNU11TVzuTzSVQribxWtvC+Gztqrdc71cbxWeI2+12eFstTUyhjpHaD3saAGscSXOHcvKfCC4XBgeeJOIBhJAd5dpdEjWx/CfaP8AmofxSyPF+MmMR2/GLVa+LwpauOaogx/I6WCrth5X9lUxSiQcj+YaBD2nRd1OiDrR05r4QV9sPEPhvabdgt/rbfkdBW1tVSGmgjrWuiazliaJKhga5nMXSB3qczlJPMBJs54+W7h3f5aO84xk8VmglghqMmjt7XWyB0paGl0nPzloL2guawgHYJ6FVvTYPxUx+g4M5NcrY7N8nxmC5Ul3omXGGOpdHVNaIndtIWxyPjbFG152C47I2oZxn4FZzxBPEaOowOLJ75d5mVFgyGvvELYLVStjiIo44nOLo5Q9kreZrQ15k254CDYCt46W+LibccEoMcyC9Xu3NpJKp9BTwmnhiqN8srpHytAa3XpD43eWtcA7WF4Acar7xWrcsp7xidys0dsvNdRwVkrIG07Y4ZRG2B/LO95nAJLiG8mwdO7gslw+xO9W/jVxIyavtrqC2XyiszKN8k0T3OfDFOJmEMcSCwyNGz0O/RJCjfDuounBO951S5fQ0dnw+vyGuvdNl1XdqaGk5aqRr2QPY94e2QOLm93KdDR6oL2RQBvhB8LXnTeJWIOOidC/UvcOp/8AUXtsfGfh9k91p7ZZs6xq7XKoJENHQ3enmmkIBJDWNeSdAE9B3AoJfLEyeJ8UrGyRvaWuY8bDge8EesKO4DK9lmntsjzI+01UtAHOJJMbDuLZPUnsnR7J7zsqSqMYKO3ZfbgN9nXXWeSPY1tsYbBv8h7EkH1gg+tdFH3VV/Lr/wAuscknREXOgiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgiolbgtTUGfTMdqJXTifrqikeS5/P6hE5xLubuaXHehoiRGGmrmRylkVQxzQWP0HAg9QQfkXoUZk4fWuOR8lvfWWRzyS5tsqXwxkk7J7IHs9k9d8u/tXRmoxO2ubTrzv+P97V582d8m0nzWH9GP1LshpoaffZRMi338jQNqOOwmcknzovw36hPF0/8A1rjzIn+tN+/Txe6Td4fj9JW0apSii3mRP9ab9+ni90qm4iXnIMX4/wDCPDaPJ7qbRlLLu6vMr4zKDTUzZIuR3IA30id7B2PkTd4fj9JLRq2CXxLEyZhZIxsjT/FcNhRnzIn+tN+/Txe6TzIn+tN+/Txe6Td4fj9JLRqkHk2k+aw/ox+pfUdDTRPD2U8THDuc1gBCjvmRP9ab9+ni90vrzCpp+lddbxco+m4p657GO18rY+UEfYdg/ImTDjnX6f8AEtGrsut5kvE81nsswdVD0KutYdsom9xGx0M2vis9XRzumg7NW6309pt9NRUkQhpaaNsUUbe5rWjQH/ILmgt9La6OKkoqaGkpYm8scEDAxjB8gaOgC9CwrriYy08v1/voCIi1IIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgLXfjP8A64Pg5/7vI/8AsWLYha78Z/8AXB8HP/d5H/2LEGxCIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAtd+M/8Arg+Dn/u8j/7Fi2IWu/Gf/XB8HP8A3eR/9ixBsQiIgIiICIiAiIgIiI
CIiAiIgIiICIiAiIgIuHODGlziGtA2Se4KFuy+93YCosttofJr+sNRcKh7HzN9TxG1h5WnvGzsjvAW7DwqsW+VYi6aooR5czD5jY/apvdp5czD5jY/apvdrdwtesdYLJuihHlzMPmNj9qm92nlzMPmNj9qm92nC16x1gsm6KEeXMw+Y2P2qb3aeXMw+Y2P2qb3acLXrHWCybooR5czD5jY/apvdp5czD5jY/apvdpwtesdYLJuihHlzMPmNj9qm92nlzMPmNj9qm92nC16x1gskmT47Q5fjd2sVzjM1tulJLQ1UbXcpdFIwseN+rbXFfhLxc4W3ThHxPvuF3GN0lbbqswxua3/AMRGesUjR8j2FrgPt13r9tvLmYfMbH7VN7tU5xF8HmXiVxpw7iRcqGzNuePD0qZs8pjrCwl0Bf8Ag+hjeS4a7+gPQJwtesdYLJr4JHBQcBuB1jx6dgbeKgG43U//AFUobzN/qNayPfr7PfrVyqEeXMw+Y2P2qb3aeXMw+Y2P2qb3acLXrHWCybooR5czD5jY/apvdp5czD5jY/apvdpwtesdYLJuihHlzMPmNj9qm92nlzMPmNj9qm92nC16x1gsm6KEeXMw+Y2P2qb3aeXMw+Y2P2qb3acLXrHWCybooR5czD5jY/apvdp5czD5jY/apvdpwtesdYLJuihHlzMPmNj9qm92vpuQZbCeeS12ioYOpjhrZGPI/wDaXR639h0PtCcLXrHWCyaovFZrvT323RVtNzCN+2lkjeV7HNJa5jh6iHAgj5QvauSYmmbTzQREUBERAREQYvKCW4zdyDoijmII/wBgqPYyAMbtIAAApIug/wBgKQ5V/mxeP5nN/cKj2M/5uWr+aRf3Avo4P3M/j+zLuZJFqZiXGDM8qv3D+4+e0ck9+yWpt9ywako6YS22mh7ffM4tMw5OxZ2hf39oOXl6E8Yhxe4v8QaC35pj9ovlXbq6u5oLN4jbG2t1EJzG4GodUiqEojDnc/KBzjXZ6WOeGLbRFq1kfEriFb8U4nZtBloZR4dktRRU9k8m05iqqWOSIuZLIW8++WQhpYWkaBJdterLOJ/EnLs/zSgw2C+09BjVU23QNtFuttRFU1HYskcal1XUMkDdyAARAeiN8xJ0LmgbNotfrXkXEnN+LNJjtVfHYPDHh9uvFyoKOkpqmWGvknmZLGySRrxyeho75ujG8uiSTHMl40ZVaOI1JcLJfrrkOJOyqnsNZE+yUsNrhEs4gfHHU84qJJY3O+OA5hc0g6TMNomyMc9zA5pc3XM0HqN9219LW7hxFc8T4m8d8lqcluNdbrTcjVT2nxemDKkC3QyN24Rc4LG6Y3lcAQwF3MSScfwz4h8Y8oqcNyJ1tvVfaL5LTz19HU0NshtlLRztDjJTyx1JqT2Yc1w7Rri8A7a0nQZhtCvJQXiguktZFRVtNVy0c3i9SyCVr3QS8odyPAPou05p0euiD6161qjT5pmNgo8hobdkEDLrPxThx511ktNKHvppaWAkyMjYxsjxz9Hn0jytBcQNKzNhtLcrlR2a31FfcKqChoaaMyz1NTII4omAbLnOcQGgDvJXbFUwzCMxyskErO0YWuB529PSHyjqOv2hawcQcqzG3YTx0xusyg3SoxW2U1zo7nVWuje+ohmgle6mnhMRhe3cLhvsweV3y9VlWWK73XwqMfqKXKK21RDCYqp1NS0tKY3xNq4g+n9OIkMeepIIcO5rmjopmGwdHeKC41VbTUlbTVVTRSCKqhhla98Dy0ODXgHbSWuB0ddCD61xd7zb7Bb5K+6V1NbaGItD6mrmbFGwucGt25xAG3EAfKSAtcYc2uWK03F6KryqWiucWVUtut1xorFSz11Q+Wmp3sp2QsYxs0pD3RtdJzEAAuJDSonlefZTkvBHizj+WGtmrsfudoZDU3Okp6WrfDNPTStE0dO50XMDzaLD1BbsA7CmYbjIiofivm2VYrxZt7bhkdThuASU9M2nulPaYqylnq3TESQ1krgXU4LezDHDlbtx27Y0s5mwvhFr9cuKWUU/Cvj7eI7ny3HF7pcae0TeLxHxaOKjgkjHLy6fp73Hbw4nejsaCwuS5xxBulXxVmteZuslNiFiortSU8Vsppu3lfRPmeyRz2E9mXRno3TgX9HAABY5oGzaLXG38Qc5x27Ysbpkzb3T5didwvDYDb4YG22qghglHYlo26MifXLIXn0Qeb1LDY7lPE67jgy+XiI9reIFtfNWtZZqT/EnNoxUh1P6HxjotPac7fSJDR0aGYbTotaBxYySTA6u01mWV0GW0mXVmO0tVZbLBVV93bAHOHJA/UMbuQtc95AY0MPdzBYul4zcRLjwupI23DyZldPxBp8Tlra+3wB8sL5GdZ4WOdGHcsoDhE4fF9Fw3tM0DatFSN4ze9cE84tsWX5TU5BilxstdKK2spaaGSGtpeaocNwxsGn0xeADvrT/ACuO51warMiufDDHrhlc/bX+vpvHalvZtj7HtXGRkOmgD8GxzY962eTZJJJVib9glPDY/wCTbuPULtV6H/3NqXKIcNf/AC68f0vV/wB9S9aNp++qWeYiIuVBERAREQYvKv8ANi8fzOb+4VHsZ/zctX80i/uBS6tpI6+jnppd9lNG6N2u/RGj/aq/pblU4tRU1suVruUs1LG2EVNDRSVMU4aAA8dm0lu9dWuAIOx1Gifo7P8A5Yc0RzuyjtizXzFOF/EbHOKsdwslsvFlpqi8me6V12utsraOqoTKXSNbyQCrc9zdcvO70ToFxAVsWDgBasUyDx6yZFktqtPjzrj5t0twDbaJnO536Zyc4Y5xLjGHhhJPoqYeedP9FX77kq/dp550/wBFX77kq/drbGz1x/5kyzoi9y4EWC6YXm2MS1lybQZbcJrlXSMljEsckvJzCIlmg38G3QcHHqepXTk/AS1X7K7lkNvyHI8Tr7rHHHcxj9e2nZX8jeVjpA5jiHhvoh7C12vWpd550/0VfvuSr92nnnT/AEVfvuSr92ruK/CZZ0dFFgFvoc/q8vZPVvudTa4LQ+OR7TF2UUkkjXAcvNzkyu2S4jQHQdSYFcfBfx64OqIm5BktJbHXTy1TWqmrmNpaKt7btzNE0xkncnM7kkL2AuJDQdEWJ550/wBFX77kq/dp550/0VfvuSr92m4r8MmWdGCbwdtkHES45dS3S7Uj7oGeU7PFOw2+vc2EwtfLG5hOwzQ9FzQeVuwdLFYdwJpOG9VBNYMiyWW228Sut+M1d0/ybCXNcBH0jMhYOY6D3PDehA2Apl550/0VfvuSr92nnnT/AEVfvuSr92m4r8MmWdEbGRcUtjeDYyB6yMrm/wD5665eBFgmqaic1lyD58qiy9wEsehVxxxxtYPQ/gtRt2PjbJ9JZXJOLWP4dZp7tfvKNltcHL2tbcLZUQQx8zg1u3uYANkgDr1JAXrt/EO23agp62io7zV0dTG2aGogs9U+ORjhtrmuEeiCCCCE3GJ30ymWWFyLgnY8mkz59VV3CM5pboLZcOxkjHZRRMlY0w7YeVxEztl3MNgdB6/rJ
uDNtyG/2K+U95vVgvFopDb2VlpqGRvqKYua4wyh7HBzeZjT0AIPcQpB550/0VfvuSr92nnnT/RV++5Kv3abivwyuWdESvnADH76b/K+vu1JW3a809/bW0s7GTUVZDEyKN8B5CAOVnUPDweZ3qOh4o/Brxt9szCir7rfru3LIYGXSeurQ+WSWEns52ODByPHogBumARs00a6zrzzp/oq/fclX7tPPOn+ir99yVfu03FfhMs6I46p4h46yG12vHrVkNBRxRwRXW8ZLJDWVQawAyTMZQuaHkgk6Oj39N6GJyPgzNxbhZUZncbvaIqhkcVfi9ovPb2uoZHKXs5i+Bj/AEuhdy8hPQEkAFZ6w8asVym4XSgs1TWXautUvY19NRW+eWSkfsgNla1hLDtrho6+KfkKzfnnT/RV++5Kv3abjE76ZTLKB5n4NlhzOTKmPv2RWi25R6d1tdrrI4qaebs2x9tp0bnB3KxmwHcruUczXdd5w8FrIW5s3xq4ay22w2uu/CM/BxRU74GmL0OjuV5JLuYb10A6KQeedP8ARV++5Kv3aeedP9FX77kq/dpuK/CuWdGBruDFkr5sWkkqq8Ox20VVmpOWRmnw1EUUT3Seh1eGwtII0Nk7B6ALVwZstoZw6bDVV7hg1M6ltvPIw9sx1N4sTNpg5jydfR5fS+zos9550/0VfvuSr92nnnT/AEVfvuSr92m4r8JlnRC67wd7DUGWelu97tV0N+qshgulDURNqKWoqGdnMyPmjLTG5nTle1x+1fFs8HDHbVSGmjut8nidkdNlLzVVbZnvroeTbi5zC4tkLGl7d9/xeQdFN/POn+ir99yVfu0886f6Kv33JV+7TcV+Eyzor3wguG1fxl82sTdY4p8ebcqe53C8T1TGiBkTjzwsi6ve+RhczfRoDzs+pXCAAAANALAeedP9FX77kq/drlmWtnPJT2W+zzHo2M2uaHmPyc8rWsH5XOA+1NzXHbZLSyXDX/y68f0vV/31L1hMQsk1itDo6ksNXUTy1U4jO2tfI8u5QdDYaCG70N63obWbXBtFUV4tUxyJ5iIi50EREBERAREQEREBERAREQFEOJPFXG+E1st9dkdZJTsuFdDbqSGCB88088h01rI2AudobJ0O4HvOgfjKeK9hxHOMWxGtdVy33I3yiigpKV8wayNu3yyOaNMYDygk9xcCegJHn4YYVkmO2isZmuUDNLtLc5q6CpdRRwR0cZ9GOOJo6jTd9SSdvcN67w+LHh2TVeVZjPmF6t1/xa4SQx2iwNt7RFSQsGy6Uu2XyOeeu9j0Gka3ytnvciICIiAiIghHEvD79dcVu4wK7UeJZbVSQ1Dbq+iZK2d8Zbpkw0S5rmt5C7qQ09Ae5fVk4q2Ot4g1nD6e4A5nbrfDX1NP4tJDHNG/QMkJdsOaHEb053LzAEkg6mqw2VWCa/WO501vr32O7VVHJS094p4mPnpS4dHN5h10dO19nq70GZRVbj3EMcPLhgnDzOb3Pec2vFFIWXiK2uhpK2aLq5nM0crX8uzrp0aSeXmaDaSAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgKurtxPor/nl64Z2SpuVFlMVofWOvENv7Wktz3+jDzOcORzySXBvUHkcCQeisVQvDxmvn1mpyEUXmx21N5vGn123Z9ke37XXXfaa1v1IMhw+xOsw7ELTarpfqzKrnRQmKW9XFrRUVBLuZxPL3DegBsnTW7LiNmSIiAiIgIiICIiAiIg+JImy62BzNJLXaBLTojY369E/81TrMoPg1YraaTOsnv8AmsFzvbqSnvctuEjqGOXZibUuiHVoI5efWyXgBoaOlyqG8XxmZ4dXj4PhRHMNReIeUNdhvtWc/Nvp/B8+vt0gmIOxsdQuV8Rc3ZM59c+hza+X1r7QEREBERAREQEREBERAREQEREBERAREQFWXDi1WSi4rcUKq3ZXNerpV1NC642eRxLLS5sBEbWj1do30j+RWaqy4cXWyVvFbihS27FJrLdKSpoW3G8SNIZdnOgJjc0+vs2+ifyoLNREQEREBF1vqIo3cr5WNd8jnAFfPjkH4+P88K2kdyLp8cg/Hx/nhPHIPx8f54S0juRdPjkH4+P88J45B+Pj/PCWkdy/PLwxfDduFBU5xwiuOCV1nmjqGQx3qhyDspnwtlZNFMxvix5e0Y1uxzHQeRs6X6D+OQfj4/zwtEv8J7wRjyTGrVxKs7Gy3G1ctvubItEvpnuPZSdP5Eji37RKPU1LSLV8ErwyK7wnchvFtZgL8etloomzTXTyt40DK54bHEW9izRc0Su3s/wZ6dVs8qG8DTgvT8B+CFptdYIoshuX+UbqS4czZngcsR6/+mzlboHXMHEd6vPxyD8fH+eEtI7kXT45B+Pj/PCeOQfj4/zwlpHci6fHIPx8f54TxyD8fH+eEtI7kXT45B+Pj/PC+45o5d8j2v138p2lpH2iIoCLgnQ2egWNx/J7NllE+ssd2obzSMkMTqi31LJ42vABLS5hI3og6+0IMmiIgIiICIiAiIgKF4f57efOa+cPiXmv21N5veL67Xs+yPb9rrrvtNa36lNFWXDi1WSi4rcUKq3ZXNerpV1NC642eRxLLS5sBEbWj1do30j+RBZqIiAozntxnpLdQ0tPO+mfcayOjdNGdPYwhzncp9RLWEA+rexo6Kkyh/Eb42M/0xH/ANGZdOzRE4sXWObGjh/jGhzY9a5D63S0cb3H7S4gkn7SnwfYt9W7R7BF+yu/L8vtGBY1X5BfavxC0UDO0qKns3ydm3YG+VgLj1I7gVGZeO2EQY7NfZbxJFamVTaKOd9DUNNVK5oc1tO0x81Rtp2DEHgjZB6Fd+/xPHPUvOrP/B9i31btHsEX7KfB9i31btHsEX7KrbiR4T+NYzwor8ux6oF7mZVMtsFOaSpHZVb3NaG1DBH2kXKHBxDw0u6Nbtzmg+vH+LlbJkuF2K53e0yV10tdVdaxkdmuFG6aFujC6nEoc1haN9oyV3ONt00b0pxGJ456l51T74PsW+rdo9gi/ZT4PsW+rdo9gi/ZUbxLwgMBzq6Wu32S/isqLpEZqFzqSeKKqDWc7mxyvjDHPa3ZcwO5m6IcAQdfb+PmAx5V5vOyGIXLxsUHN2E3i3jO9dh4xydj2m+nJz82+mt9E3+J456l51SH4PsW+rdo9gi/ZT4PsW+rdo9gi/ZWfUKoOMuH3XN5sSo7uam+wzSU8kMVLMYmysYXvi7bk7Lna0ElnNsa7ld/ieKepedWW+D7Fvq3aPYIv2U+D7Fvq3aPYIv2Vj6Ti3iddjON5DBdeez5FUw0drqfFpR4xLKSI28pZzN2Wnq4ADXUhR2s8Jzhpb6h8VTkzYBHVy0Ek76KpEEdTG5zXwvl7PkbJtjtMLgXDRaCHAmb/E8c9S86pl8H2LfVu0ewRfsp8H2LfVu0ewRfsqPQ8esElxq7359+FLbLPUQ0txfWUk9PJSSSvYyPtYpGNkYHGRunFvLok70CRipPCh4axOrGPv1Syoo2iSopnWitE8UWt9sYux5xFrr2uuTqPS6h
OIxPHPUvOqbfB9i31btHsEX7KfB9i31btHsEX7KwOW8dcHweltlTdr3yU9ypvHaWakpJ6pkkGge1JhY8NZog8ztDr3rtyTjZhWJixeUb23d9pn1drFJTzVRrYmiMkxCJji86lYQ0dSDsAgHTf4njnqXnVmfg+xb6t2j2CL9lddThNoo6eSe1W+ls1wia58FZQQshkjd3g7aOo6DbTsOHQgjovFQ8XcTuGM3/ACCO6GO1WDtBc5KmlmgfSlkTZXB0b2B++R7XDTTvfTZ6KSRVsVytDKunLnQVEAljL2OY4tc3Y21wBB0e4gEetZRjYkz/ALT1W8onjl84pZ5w1vNYKOw4nfqtzH2CpMj6yF1O4McJZmaBDi3m00d2xvuKyVy4ZZHlNHgk13zy7W26WF8dRcjjzm0tPeJWmMkSsIP4Mlh2zuIe4etSXhr/AKOsV/oql/6LVJF8vGpinEqpjlEyk80NtnCTGbTxLu+fQUk5ye6UzaOoqZKuV0YhAjHI2Iu5Gg9kwkhu9jv6lQ/OODeK4lglG6wV7eGFjx67x5PWS2SHsoZRA3cjZo2kB7HMaObYPxB0OlcS89xt9NdrfU0NbAypo6mJ0M0Eo22RjgQ5pHrBBIWpHRYb5QZPY7feLXUtrLZcKeOrpahgIbLE9ocxw316gg9V71X3Bq9y11pvVnOFTYPQY7dJrPb6Qt1BVUsQaI6iE8rRyO2dAA613lWCgIiICIiAiIgKsuHF1slbxW4oUtuxSay3SkqaFtxvEjSGXZzoCY3NPr7Nvon8qs1QvD/Pbz5zXzh8S81+2pvN7xfXa9n2R7ftddd9prW/UgmiIiAofxG+NjP9MR/9GZTBRHiIwluOyfxI7vEXH5NxyNH/AOXAf8V1bN97H5/oyp5q28KWyV2R+D/mlttlBU3OuqaNrIqSkhdLLKe0YSGsaCT0B7go14SOG11bknDrI4LbfrnYLBPWRXGjxWpmguEbJ4WsjmiEL2PcGFmnNYd8sh6EbV8ot8xdi1gyvA6Gv4I5PXYpjOYR3O8XuzvqIsiNXU3CqbT11L+F5JnvkDGxh3fohrCSAAFYPE6x3G4cbeHNdS2+qqaGltN+jqKmGFz44XyR0oja9wGmlxa7lB7+U67lbyKZRrHjWH3yl4TeDXTPslwirrRdqN9whdSSNkooxQ1TXmZutxjmc1pLtDbgD3qMcLuF1FbrNb+H2cYxxGrrtT3F0c89LcK82Kpb4wZY6vbZhA1vxXlug4OB9ElbhopkgFrraxd8f49eL4VZcqt9oul3qZcno7tby20OHZu/x6lqD3SPe1noMcQ7mJLWkbVkP8HjhfI9z38PcZc5x2XG1Qkk/mqeUdHBb6SClpYWU9NAxsUUMTQ1rGNGg0AdwAAGllaZGouN0mQUvDbgzgcuG5Iy74xk9v8AKtS62SCkhihlkBlbNrlkYQQ4OZsAfGLfXmW4ZfvgegojYrj44OJ3lA0/icnaeLeWjJ2/LrfZ9n6fP3cvXeltOixyjVnjJht+ul740vorFcauK4sxLxR0FJI8VJhrXOm7PQ9Pkbou1vlGt6CsSqx+4SeEBmlw8m1LrdU4XSUkVV2DjDLMKirLomu1pzgHNJaDvTh06hXEiuUagWC35db8Q4c2G/WvOYMbhwymijt+MwzQTvug218NW9nK+FrWdnyh7mR7LuY9NLN8GcPvlHcPB+NysFypH2HG7xQ1zquje0Uc4NNG1rnEabzBj+Q79Nuy3YW0iKZRrDxqw24VXHW041QRtfj/ABJZTvvrd9WC2SNlkd+SaF0cJ/2Qtmar/wANL/sH+xYOg4f4/bcxuWVwW1gyK4RNgqLhI98j+zaGgMZzEiNvoNJawAEjZ2eqzVdI2GiqJHuDWMjc5zj6gB1WdMdo7uGv+jrFf6Kpf+i1SRaK5j/hIbHwgttJh9vwu7XTILLTxUFUbjI2igEjIw0vZ0e97TrY21mwQR0O11eCN4X/ABH8JbjubZdqy049jVrttTc6i3W+g344A6OGON0sj3OZp87ZOZpG+z5dad05Mftxa/xn9Vnm3vRFCeMnEuzcJOHtxyO/RVtRbo3RUzoLaN1MrppGxNbGOZvpbfvoQeh0tCOjh9RZFNlmY3y4ZTR37GLpPAbDRUHK6Ohijj5JdvA9Jz39T6RHTprZCnqjXDjALJwuwq14vjlG6gs1AxzYKd7y9zeZ7nu24kkkuc4k79akqAiIgIiICIiAqy4cWqyUXFbihVW7K5r1dKupoXXGzyOJZaXNgIja0ertG+kfyKzVWXDi62St4rcUKW3YpNZbpSVNC243iRpDLs50BMbmn19m30T+VBZqIiAvNcrbTXehmo6yFtRTSjlfG/uPyfkIOiCOoIBC9KKxMxN4EPfgFUDqHLr3BGO5nLSSa/rPgLj/AMSSuPMCv+ud7/Q0P7spii6eJxfLpHsyvKHeYFf9c73+hof3ZPMCv+ud7/Q0P7spiicTieXSPYvKHeYFf9c73+hof3ZPMCv+ud7/AEND+7KYonE4nl0j2Lyh3mBX/XO9/oaH92UW4nYVntHg1zmwTJ6qvytoj8Sp7vHRtpnntGh/OWwNPRnORojqAraVa+Eda7LeuC+R0WQ5TNhdnlbB299p3Fr6XU8ZaQR/KcGs/rJxOJ5dI9i8svHgNyMbefMr0H6HMBDQ637MvrzAr/rne/0ND+7KWwACCMNdztDRp3y9O9dicTieXSPYvKHeYFf9c73+hof3ZPMCv+ud7/Q0P7spiicTieXSPYvKHeYFf9c73+hof3ZPMCv+ud7/AEND+7KYonE4nl0j2Lyh3mBX/XO9/oaH92XZFw9ZMWtul7uV6pgdupKsQMik7iA8RRMLh0+KTo9xBHRS1E4nF19Ij9i8qS8JHwTcO8JG0DypGbTklPHyUd+pIwZox3hkg6drHs/FJBGzyluzujfA/wDAouPDk8V7DxNx+huNpvMVJQUVwp6lp8cpg6V8zWPjcJomlwpy5p5Nlje/l6bvouVirO5cDqeOy4XaMZya/YbasXkZ2VFaav8AB1kILfwNRzgue3TSN736RPVZeDArtHxPrMnlzC5VFknpBTsxeRjPE4n6aO1B1zc3ok/1ipqiAiIgIiICIiAiIgKF4f57efOa+cPiXmv21N5veL67Xs+yPb9rrrvtNa36lNFTGQV9q8HnK77mF4rMhvNHm13oKLxeko31UNqeIzEx2m7LWPcWg6BJc5oAO0FzoiICIiAiIgIiICIiAq18I66WWy8F8jrchxabNLPE2Dt7FTtLn1W54w0AD+S4tf8A1VZSq3MM4u3ECxXq18IMkx6bKbTc4aC5T1xdNFQNJDpfRaNPeG9AN62HjYc0gBZ0BBgjLW8jS0ab8nTuXYuGghoDjs66nWtrlAREQEREBERAREQEREBERAREQEREBERAXBAPeNrlEFTXmjquBoz/AD2S45Vmlrrnw1oxmnY2qfROGmSupwdO5OXldybAaGHv30s60XOK9WmiuEMc0UNXAydkdTE6KVrXNDgHscAWuAPVpGwehXrWu/hFcQsZ8Gq/ycVLtkl4muF
bbXWqkwuKsBp7rMxwdG8McD2Qj53c8gGgHjoXODJA2IRfmF4FXhYXvIPCsvlRl9VAPhBLIZRBGIoYaqJnLSNYN9GhgMI3tzuZpc4nZP6eoCIiAiIgLqqamGippaiolZBTwsMkksrg1jGgbLiT0AA67K0c/wAKRxlFgwSzcOaGflrb7IK64Na4bbSRO/BtI+R8o2D/APCflXo8DDwosv8ACXzqmtWRX6koTYbJI6vtFPbgRfi54jNU+T4sXJzRc0beUFzyWjlcWxhsUMuvPGBuHX/hbltm8zWXOXy3UzUj5ZquKJxYYYN6ADnBwLuh1yOaSNh1h2HGLRi0FTDZrZSWuKqqZKydlJC2ISzSHmfI7Q6uce8nqvRabRQ2C2U1utlHT2630sYigpaWJscUTB3Na1oAAHyBetAREQEREBERAREQEREBERAREQEREBERAREQEREBa++FHw/4XcXaahtWZUlwut5tjZfExZZyyooxNyF5JJ7Ic3ZxnUgJ0NtHUqyOK+bTYta6ejt7xHdriXMil1zdhG3XaS6PQkbaBvpzOBIIBCpGKJsLSGg+k4vc4kkucTsuJPUkkkknqSdlei+HfDI2mnfY3+vdGv8AByao3HwInUF/prniGR1dlNJM2emdcnNqJ43tPMx/PG1gBBAPct6qTjtf46SFlTjVvnqWsaJZY7o9jXu11Ib2B5QT6tnXylQVF6L5XsX0/Wr3M3ksD4ebz9VaL74f+7p8PN5+qtF98P8A3dV+ivyvYvp+tXuZvJYHw83n6q0X3w/93T4ebz9VaL74f+7qvZZWQRPkke2ONgLnPcdBoHeSfUF10VdTXOjgq6Ooiq6SdgkingeHskYRsOa4dCCOoIU+WbFy3frV7mbyUDxu8HG/8eeLtxzTIL/DBSVLmMitdK0l0FOwBrYmSOGt62S7l6ucTobVweDrwI4NcG8tt1+bQ3yjyak520t1vdcJYI3SMdG7RhDGDbHuG5WAdeh3oqSIQHAgjYPeCtdfwnY64tTTl/CZ/eZM3k2l71yqc4PZlLQ3GLGKuQvo5WONuc7viLG7dAP/AG8oc5o/ihrhvXKBca8XtezV7Jizh1flOsAiIuMEREBERAREQEREBERAREQEREBERAREQEREFC8XpnT8R5GO+LBbYGsB305pJS4/8dAf1VE1YnHCwvp7tbcgjaTBJGLfUuA+IeYuhJ+QbdI3fyuaPWq1rX1EVFUPpIY6iqbG4xRSyGNj369FrnAO5QToE6OvkPcv0b4dXTXslE090W6JU7kUNF6z3fXE7Hr7Mgl/dFzHec7dIwSYrY2MJHM5t/lJA9Z14oNrs3tOk9J9mKqrbxH4j5bTSZHYLddamldWSMpbY2kofEZII5nRkPlfMJw8hpJcAAHdA0jqffkGZZlDaeJOQUmRCCnxW5yR01tNDC5k8bIoZHMkeRzaIeQC0tI6kl3QCeUPCC32m+SV1svV8tdHLWePyWekrAyjfMXczjy8vMA4jZaHBp2eml6qvhZaa2xZfaX1FYKfJ55Kisc17OeNz42RkRnl0BqNuth3UlcEYGNl7apv+PfaeX59yobf75kme3nM6G03wY7abBSRxujbRxzyVk0tP2x5y/4rA1zWgN0SdnmHRS/gp/oewn+hqT/otXmvnB22Xi81Fzp7tebJUVlMykrha6psbKxjGlrO1BY70g0kBzdHXrXbQ0OSYTa7fYMesVvudnttLDS09VcLy6Cd7WMDfTY2mcN9O8Hr36Hct1FNdGJNdfnrPf2dnd2CcIoab1nvTWJ2P7d5BL+6KQWCqu9XROfebfS22qEhDYaOsdVMLNDTud0cZB3vpr1Dr16ddOJFU2i/SUZe3zupb9Yp4zqSO6Ugb37PNMxjgPytc4f8VtCtdsAsL8kze2xBhdS297bhUv10byk9k3fymQAj7I3fItiV5D45XTOLRTHOI7fz/vq2dwiIvNIIiICIiAiIgIiICIiAiIgIiICIiAiIgIiIPPX0FNdaKejq4WVFLOwxyRSDbXNPeCqRynhRerBM+S0wvvds72sa8CqiHyEOIEgHyg832E9TeyLv2TbcXY6r4fKecTyVqy+OshJEtpu8Lh05ZLZUNP8Ac6/lC+Oaf6Ouf3dP+wtqUX2/ntX0/X+EtDVbmn+jrn93T/sJzT/R1z+7p/2FtSifPavp+v8ABaGq3NP9HXP7un/YTmn+jrn93T/sLalE+e1fT9f4LQ1YaKl5022XRx/ktttQSfyDk6rO2HAckyWVohtstqpSfSrLpGY+Uevli2HuP2ENB/lBbFItdfxzFmLUURE9V7GFxPE6HD7UKKiDnFx55qiTRknfoAucR6+gGh0AAA0As0iLzlddWJVNdc3mUERFgCIiAiIgIiICIiAiIgIiIP/Z", + "text/plain": [ + "" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from IPython.display import Image\n", + "\n", + "Image(app.get_graph().draw_mermaid_png())" + ] + }, + { + "cell_type": "markdown", + "id": "74f3e276-f003-4112-ba14-c6952076c4f8", + "metadata": {}, + "source": [ + "## Invoke graph\n", + "\n", + "We can step through the execution as follows, printing out the summary as it is refined:" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "0701bb7d-fbc6-497e-a577-25d56e6e43c6", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Apples are characterized by their red color.\n", + "Apples are characterized by their red color, while blueberries are known for their blue hue.\n", + "Apples are characterized by their red color, blueberries are known for their blue hue, and bananas are recognized for their yellow color.\n" + ] + } + ], + "source": [ + "async for step in app.astream(\n", + " {\"contents\": [doc.page_content for doc in documents]},\n", + " stream_mode=\"values\",\n", + "):\n", + " if summary 
:= step.get(\"summary\"):\n", + " print(summary)" + ] + }, + { + "cell_type": "markdown", + "id": "49147724-de8b-44fd-bf13-5ef3432c7c6b", + "metadata": {}, + "source": [ + "The final `step` contains the summary as synthesized from the entire set of documents." + ] + }, + { + "cell_type": "markdown", + "id": "f15c225a-db1d-48cf-b135-f588e7d615e6", + "metadata": {}, + "source": [ + "## Next steps\n", + "\n", + "Check out the summarization [how-to guides](/docs/how_to/#summarization) for additional summarization strategies, including those designed for larger volumes of text.\n", + "\n", + "See [this tutorial](/docs/tutorials/summarization) for more detail on summarization.\n", + "\n", + "See also the [LangGraph documentation](https://langchain-ai.github.io/langgraph/) for detail on building with LangGraph." + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.4" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/docs/how_to/summarize_stuff.ipynb b/docs/docs/how_to/summarize_stuff.ipynb new file mode 100644 index 0000000000000..3c47398752a65 --- /dev/null +++ b/docs/docs/how_to/summarize_stuff.ipynb @@ -0,0 +1,209 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "c47f5b2f-e14c-43e7-a0ab-d71562636624", + "metadata": {}, + "source": [ + "---\n", + "sidebar_position: 3\n", + "keywords: [summarize, summarization, stuff, create_stuff_documents_chain]\n", + "---" + ] + }, + { + "cell_type": "markdown", + "id": "682a4f53-27db-43ef-a909-dd9ded76051b", + "metadata": {}, + "source": [ + "# How to summarize text in a single LLM call\n", + "\n", + "LLMs can summarize and otherwise distill desired information from text, including large volumes of text. In many cases, especially for models with larger context windows, this can be adequately achieved via a single LLM call.\n", + "\n", + "LangChain implements a simple [pre-built chain](https://api.python.langchain.com/en/latest/chains/langchain.chains.combine_documents.stuff.create_stuff_documents_chain.html) that \"stuffs\" a prompt with the desired context for summarization and other purposes. In this guide we demonstrate how to use the chain." + ] + }, + { + "cell_type": "markdown", + "id": "4aa52e84-d1b5-4b33-b4c4-541156686ef3", + "metadata": {}, + "source": [ + "## Load chat model\n", + "\n", + "Let's first load a chat model:\n", + "```{=mdx}\n", + "import ChatModelTabs from \"@theme/ChatModelTabs\";\n", + "\n", + "\n", + "```" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "e5f426fc-cea6-4351-8931-1e422d3c8b69", + "metadata": {}, + "outputs": [], + "source": [ + "# | output: false\n", + "# | echo: false\n", + "\n", + "from langchain_openai import ChatOpenAI\n", + "\n", + "llm = ChatOpenAI(model=\"gpt-4o-mini\", temperature=0)" + ] + }, + { + "cell_type": "markdown", + "id": "b137fe82-0a53-4910-b53e-b87a297f329d", + "metadata": {}, + "source": [ + "## Load documents" + ] + }, + { + "cell_type": "markdown", + "id": "a81dc91d-ae72-4996-b809-d4a9050e815e", + "metadata": {}, + "source": [ + "Next, we need some documents to summarize. Below, we generate some toy documents for illustrative purposes. 
See the document loader [how-to guides](/docs/how_to/#document-loaders) and [integration pages](/docs/integrations/document_loaders/) for additional sources of data. The [summarization tutorial](/docs/tutorials/summarization) also includes an example summarizing a blog post." + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "27c8fed0-b2d7-4549-a086-f5ee657efc41", + "metadata": {}, + "outputs": [], + "source": [ + "from langchain_core.documents import Document\n", + "\n", + "documents = [\n", + " Document(page_content=\"Apples are red\", metadata={\"title\": \"apple_book\"}),\n", + " Document(page_content=\"Blueberries are blue\", metadata={\"title\": \"blueberry_book\"}),\n", + " Document(page_content=\"Bananas are yelow\", metadata={\"title\": \"banana_book\"}),\n", + "]" + ] + }, + { + "cell_type": "markdown", + "id": "84216044-6f1e-4b90-b4fa-29ec305abf51", + "metadata": {}, + "source": [ + "## Load chain\n", + "\n", + "Below, we define a simple prompt and instantiate the chain with our chat model and documents:" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "669afa40-2708-4fa1-841e-c74a67bd9175", + "metadata": {}, + "outputs": [], + "source": [ + "from langchain.chains.combine_documents import create_stuff_documents_chain\n", + "from langchain_core.prompts import ChatPromptTemplate\n", + "\n", + "prompt = ChatPromptTemplate.from_template(\"Summarize this content: {context}\")\n", + "chain = create_stuff_documents_chain(llm, prompt)" + ] + }, + { + "cell_type": "markdown", + "id": "74f3e276-f003-4112-ba14-c6952076c4f8", + "metadata": {}, + "source": [ + "## Invoke chain\n", + "\n", + "Because the chain is a [Runnable](/docs/concepts/#runnable-interface), it implements the usual methods for invocation:" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "0701bb7d-fbc6-497e-a577-25d56e6e43c6", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'The content describes the colors of three fruits: apples are red, blueberries are blue, and bananas are yellow.'" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "result = chain.invoke({\"context\": documents})\n", + "result" + ] + }, + { + "cell_type": "markdown", + "id": "14fb5647-1458-43af-afb7-5aae7b8cab1d", + "metadata": {}, + "source": [ + "### Streaming\n", + "\n", + "Note that the chain also supports streaming of individual output tokens:" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "0d7a5f67-2ec8-4f90-b085-2969fcb14dce", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "|The| content| describes| the| colors| of| three| fruits|:| apples| are| red|,| blueberries| are| blue|,| and| bananas| are| yellow|.||" + ] + } + ], + "source": [ + "for chunk in chain.stream({\"context\": documents}):\n", + " print(chunk, end=\"|\")" + ] + }, + { + "cell_type": "markdown", + "id": "f15c225a-db1d-48cf-b135-f588e7d615e6", + "metadata": {}, + "source": [ + "## Next steps\n", + "\n", + "See the summarization [how-to guides](/docs/how_to/#summarization) for additional summarization strategies, including those designed for larger volumes of text.\n", + "\n", + "See also [this tutorial](/docs/tutorials/summarization) for more detail on summarization." 
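Because the chain is a standard Runnable, asynchronous counterparts of the calls above are also available. A brief sketch, assuming the `chain` and `documents` objects defined earlier:

```python
# Brief sketch, assuming the `chain` and `documents` objects defined above.
# Runnables also expose async methods such as `ainvoke` and `astream`.
import asyncio


async def summarize() -> str:
    # Async equivalent of chain.invoke({"context": documents})
    return await chain.ainvoke({"context": documents})


print(asyncio.run(summarize()))
```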
+ ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.4" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/docs/tutorials/summarization.ipynb b/docs/docs/tutorials/summarization.ipynb index 0daae27ff4be0..a4f0746d32682 100644 --- a/docs/docs/tutorials/summarization.ipynb +++ b/docs/docs/tutorials/summarization.ipynb @@ -18,6 +18,14 @@ "source": [ "# Summarize Text\n", "\n", + ":::{.callout-info}\n", + "\n", + "This tutorial demonstrates text summarization using built-in chains and [LangGraph](https://langchain-ai.github.io/langgraph/).\n", + "\n", + "A [previous version](https://python.langchain.com/v0.1/docs/use_cases/summarization/) of this page showcased the legacy chains [StuffDocumentsChain](/docs/versions/migrating_chains/stuff_docs_chain/), [MapReduceDocumentsChain](/docs/versions/migrating_chains/map_reduce_chain/), and [RefineDocumentsChain](https://python.langchain.com/v0.2/docs/versions/migrating_chains/refine_docs_chain/). See [here](/docs/versions/migrating_chains/) for information on using those abstractions and a comparison with the methods demonstrated in this tutorial.\n", + "\n", + ":::\n", + "\n", "Suppose you have a set of documents (PDFs, Notion pages, customer questions, etc.) and you want to summarize the content. \n", "\n", "LLMs are a great tool for this given their proficiency in understanding and synthesizing text.\n", @@ -48,12 +56,11 @@ "\n", "- Using [document loaders](/docs/concepts/#document-loaders), specifically the [WebBaseLoader](https://api.python.langchain.com/en/latest/document_loaders/langchain_community.document_loaders.web_base.WebBaseLoader.html) to load content from an HTML webpage.\n", "\n", - "- Three ways to summarize or otherwise combine documents.\n", + "- Two ways to summarize or otherwise combine documents.\n", " 1. [Stuff](/docs/tutorials/summarization#stuff), which simply concatenates documents into a prompt;\n", - " 2. [Map-reduce](/docs/tutorials/summarization#map-reduce), which splits documents into batches, summarizes those, and then summarizes the summaries;\n", - " 3. [Refine](/docs/tutorials/summarization#refine), which updates a rolling summary be iterating over the documents in a sequence.\n", + " 2. [Map-reduce](/docs/tutorials/summarization#map-reduce), for larger sets of documents. This splits documents into batches, summarizes those, and then summarizes the summaries.\n", "\n", - "That's a fair amount to cover! Let's dive in.\n", + "Shorter, targeted guides on these strategies and others, including [iterative refinement](/docs/how_to/summarize_refine), can be found in the [how-to guides](/docs/how_to/#summarization).\n", "\n", "## Setup\n", "\n", @@ -117,15 +124,13 @@ "source": [ "## Overview\n", "\n", - "A central question for building a summarizer is how to pass your documents into the LLM's context window. Three common approaches for this are:\n", + "A central question for building a summarizer is how to pass your documents into the LLM's context window. Two common approaches for this are:\n", "\n", "1. `Stuff`: Simply \"stuff\" all your documents into a single prompt. 
This is the simplest approach (see [here](/docs/tutorials/rag#built-in-chains) for more on the `create_stuff_documents_chain` constructor, which is used for this method).\n", "\n", "2. `Map-reduce`: Summarize each document on its own in a \"map\" step and then \"reduce\" the summaries into a final summary (see [here](https://api.python.langchain.com/en/latest/chains/langchain.chains.combine_documents.map_reduce.MapReduceDocumentsChain.html) for more on the `MapReduceDocumentsChain`, which is used for this method).\n", "\n", - "3. `Refine`: Update a rolling summary be iterating over the documents in a sequence.\n", - " \n", - " " + "Note that map-reduce is especially effective when understanding of a sub-document does not rely on preceeding context. For example, when summarizing a corpus of many, shorter documents. In other cases, such as summarizing a novel or body of text with an inherent sequence, [iterative refinement](/docs/how_to/summarize_refine) may be more effective." ] }, { @@ -141,11 +146,7 @@ "id": "bea785ac", "metadata": {}, "source": [ - "## Quickstart\n", - "\n", - "To give you a sneak preview, either pipeline can be wrapped in a single object: `load_summarize_chain`. \n", - "\n", - "Suppose we want to summarize a blog post. We can create this in a few lines of code.\n", + "## Setup\n", "\n", "First set environment variables and install packages:" ] @@ -157,7 +158,7 @@ "metadata": {}, "outputs": [], "source": [ - "%pip install --upgrade --quiet langchain-openai tiktoken chromadb langchain beautifulsoup4\n", + "%pip install --upgrade --quiet tiktoken langchain langgraph beautifulsoup4\n", "\n", "# Set env var OPENAI_API_KEY or load from a .env file\n", "# import dotenv\n", @@ -165,21 +166,6 @@ "# dotenv.load_dotenv()" ] }, - { - "cell_type": "markdown", - "id": "36138740", - "metadata": {}, - "source": [ - "We can use `chain_type=\"stuff\"`, especially if using larger context window models such as:\n", - "\n", - "* 128k token OpenAI `gpt-4-turbo-2024-04-09` \n", - "* 200k token Anthropic `claude-3-sonnet-20240229`\n", - "\n", - "We can also supply `chain_type=\"map_reduce\"` or `chain_type=\"refine\"`.\n", - "\n", - "First we load in our documents. We will use [WebBaseLoader](https://api.python.langchain.com/en/latest/document_loaders/langchain_community.document_loaders.web_base.WebBaseLoader.html) to load a blog post:" - ] - }, { "cell_type": "code", "execution_count": 2, @@ -189,37 +175,59 @@ "source": [ "import os\n", "\n", - "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"True\"" + "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"" + ] + }, + { + "cell_type": "markdown", + "id": "21541329-f883-42ca-bc94-ab9793951dfa", + "metadata": {}, + "source": [ + "First we load in our documents. We will use [WebBaseLoader](https://api.python.langchain.com/en/latest/document_loaders/langchain_community.document_loaders.web_base.WebBaseLoader.html) to load a blog post:" ] }, { "cell_type": "code", - "execution_count": 1, + "execution_count": 4, "id": "23154e97-c4cb-4bcb-a742-f0c9d06639da", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "The article discusses the concept of LLM-powered autonomous agents, with a focus on the components of planning, memory, and tool use. It includes case studies and proof-of-concept examples, as well as challenges and references to related research. 
The author emphasizes the potential of LLMs in creating powerful problem-solving agents, while also highlighting limitations such as finite context length and reliability of natural language interfaces.\n" - ] - } - ], + "outputs": [], "source": [ - "from langchain.chains.summarize import load_summarize_chain\n", "from langchain_community.document_loaders import WebBaseLoader\n", - "from langchain_openai import ChatOpenAI\n", "\n", "loader = WebBaseLoader(\"https://lilianweng.github.io/posts/2023-06-23-agent/\")\n", - "docs = loader.load()\n", + "docs = loader.load()" + ] + }, + { + "cell_type": "markdown", + "id": "22548ae0-7f67-4dd0-a3f8-d6675b38df53", + "metadata": {}, + "source": [ + "Let's next select a LLM:\n", "\n", - "llm = ChatOpenAI(temperature=0, model_name=\"gpt-3.5-turbo-1106\")\n", - "chain = load_summarize_chain(llm, chain_type=\"stuff\")\n", + "```{=mdx}\n", + "import ChatModelTabs from \"@theme/ChatModelTabs\";\n", "\n", - "result = chain.invoke(docs)\n", + "\n", + "```" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "b1c639d9-b27c-4e71-9312-d2666b05f1e3", + "metadata": {}, + "outputs": [], + "source": [ + "# | output: false\n", + "# | echo: false\n", "\n", - "print(result[\"output_text\"])" + "from langchain_openai import ChatOpenAI\n", + "\n", + "llm = ChatOpenAI(model=\"gpt-4o-mini\", temperature=0)" ] }, { @@ -227,16 +235,19 @@ "id": "615b36e1", "metadata": {}, "source": [ - "## Option 1. Stuff {#stuff}\n", + "## Stuff: summarize in a single LLM call {#stuff}\n", "\n", - "When we use `load_summarize_chain` with `chain_type=\"stuff\"`, we will use the [StuffDocumentsChain](https://api.python.langchain.com/en/latest/chains/langchain.chains.combine_documents.stuff.StuffDocumentsChain.html#langchain.chains.combine_documents.stuff.StuffDocumentsChain).\n", + "We can use [create_stuff_documents_chain](https://api.python.langchain.com/en/latest/chains/langchain.chains.combine_documents.stuff.create_stuff_documents_chain.html), especially if using larger context window models such as:\n", + "\n", + "* 128k token OpenAI `gpt-4o` \n", + "* 200k token Anthropic `claude-3-5-sonnet-20240620`\n", "\n", "The chain will take a list of documents, insert them all into a prompt, and pass that prompt to an LLM:" ] }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 6, "id": "ef45585d", "metadata": {}, "outputs": [ @@ -244,30 +255,73 @@ "name": "stdout", "output_type": "stream", "text": [ - "The article discusses the concept of building autonomous agents powered by large language models (LLMs). It explores the components of such agents, including planning, memory, and tool use. The article provides case studies and examples of proof-of-concept demos, highlighting the challenges and limitations of LLM-powered agents. It also includes references to related research papers and projects.\n" + "The article \"LLM Powered Autonomous Agents\" by Lilian Weng discusses the development and capabilities of autonomous agents powered by large language models (LLMs). It outlines a system architecture that includes three main components: planning, memory, and tool use. \n", + "\n", + "1. **Planning**: Agents decompose complex tasks into manageable subgoals and engage in self-reflection to improve their performance over time. Techniques like Chain of Thought (CoT) and Tree of Thoughts (ToT) are highlighted for enhancing reasoning and planning.\n", + "\n", + "2. 
**Memory**: The article distinguishes between short-term and long-term memory, explaining how agents can utilize in-context learning and external vector stores for information retrieval. Maximum Inner Product Search (MIPS) algorithms are discussed for efficient memory access.\n", + "\n", + "3. **Tool Use**: The integration of external tools allows agents to extend their capabilities beyond their inherent knowledge. Examples include MRKL systems and frameworks like HuggingGPT, which facilitate task planning and execution through API calls.\n", + "\n", + "The article also addresses challenges faced by LLM-powered agents, such as finite context length, difficulties in long-term planning, and the reliability of natural language interfaces. It concludes with case studies demonstrating the practical applications of these agents in scientific discovery and interactive simulations.\n", + "\n", + "Overall, the article emphasizes the potential of LLMs as general problem solvers and their ability to function as autonomous agents in various domains.\n" ] } ], "source": [ - "from langchain.chains.combine_documents.stuff import StuffDocumentsChain\n", + "from langchain.chains.combine_documents import create_stuff_documents_chain\n", "from langchain.chains.llm import LLMChain\n", - "from langchain_core.prompts import PromptTemplate\n", + "from langchain_core.prompts import ChatPromptTemplate\n", "\n", "# Define prompt\n", - "prompt_template = \"\"\"Write a concise summary of the following:\n", - "\"{text}\"\n", - "CONCISE SUMMARY:\"\"\"\n", - "prompt = PromptTemplate.from_template(prompt_template)\n", + "prompt = ChatPromptTemplate.from_messages(\n", + " [(\"system\", \"Write a concise summary of the following:\\\\n\\\\n{context}\")]\n", + ")\n", "\n", - "# Define LLM chain\n", - "llm = ChatOpenAI(temperature=0, model_name=\"gpt-3.5-turbo-16k\")\n", - "llm_chain = LLMChain(llm=llm, prompt=prompt)\n", + "# Instantiate chain\n", + "chain = create_stuff_documents_chain(llm, prompt)\n", "\n", - "# Define StuffDocumentsChain\n", - "stuff_chain = StuffDocumentsChain(llm_chain=llm_chain, document_variable_name=\"text\")\n", + "# Invoke chain\n", + "result = chain.invoke({\"context\": docs})\n", + "print(result)" + ] + }, + { + "cell_type": "markdown", + "id": "02d5a634-203c-4e43-ac55-4e502be095d3", + "metadata": {}, + "source": [ + "### Streaming\n", "\n", - "docs = loader.load()\n", - "print(stuff_chain.invoke(docs)[\"output_text\"])" + "Note that we can also stream the result token-by-token:" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "b7a89b7a-0141-4689-b768-a2a50cdce7da", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "|The| article| \"|LL|M| Powered| Autonomous| Agents|\"| by| Lil|ian| W|eng| discusses| the| development| and| capabilities| of| autonomous| agents| powered| by| large| language| models| (|LL|Ms|).| It| outlines| a| system| overview| that| includes| three| main| components|:| planning|,| memory|,| and| tool| use|.| \n", + "\n", + "|1|.| **|Planning|**| involves| task| decomposition|,| where| agents| break| down| complex| tasks| into| manageable| sub|go|als|,| and| self|-ref|lection|,| allowing| agents| to| learn| from| past| actions| to| improve| future| performance|.\n", + "\n", + "|2|.| **|Memory|**| is| categorized| into| short|-term| and| long|-term| memory|,| with| techniques| like| Maximum| Inner| Product| Search| (|M|IPS|)| used| for| efficient| information| retrieval|.\n", + "\n", + "|3|.| **|Tool| Use|**| 
highlights| the| integration| of| external| APIs| to| enhance| the| agent|'s| capabilities|,| illustrated| through| case| studies| like| Chem|Crow| for| scientific| discovery| and| Gener|ative| Agents| for| sim|ulating| human| behavior|.\n", + "\n", + "|The| article| also| addresses| challenges| such| as| finite| context| length|,| difficulties| in| long|-term| planning|,| and| the| reliability| of| natural| language| interfaces|.| It| concludes| with| references| to| various| studies| and| projects| that| contribute| to| the| field| of| L|LM|-powered| agents|.||" + ] + } + ], + "source": [ + "for token in chain.stream({\"context\": docs}):\n", + " print(token, end=\"|\")" ] }, { @@ -275,8 +329,6 @@ "id": "4e4e4a43", "metadata": {}, "source": [ - "Great! We can see that we reproduce the earlier result using the `load_summarize_chain`.\n", - "\n", "### Go deeper\n", "\n", "* You can easily customize the prompt. \n", @@ -288,32 +340,37 @@ "id": "ad6cabee", "metadata": {}, "source": [ - "## Option 2. Map-Reduce {#map-reduce}\n", + "## Map-Reduce: summarize long texts via parallelization {#map-reduce}\n", "\n", - "Let's unpack the map reduce approach. For this, we'll first map each document to an individual summary using an `LLMChain`. Then we'll use a `ReduceDocumentsChain` to combine those summaries into a single global summary.\n", - " \n", - "First, we specify the LLMChain to use for mapping each document to an individual summary:" + "Let's unpack the map reduce approach. For this, we'll first map each document to an individual summary using an LLM. Then we'll reduce or consolidate those summaries into a single global summary.\n", + "\n", + "Note that the map step is typically parallelized over the input documents.\n", + "\n", + "[LangGraph](https://langchain-ai.github.io/langgraph/), built on top of `langchain-core`, suports [map-reduce](https://langchain-ai.github.io/langgraph/how-tos/map-reduce/) workflows and is well-suited to this problem:\n", + "\n", + "- LangGraph allows for individual steps (such as successive summarizations) to be streamed, allowing for greater control of execution;\n", + "- LangGraph's [checkpointing](https://langchain-ai.github.io/langgraph/how-tos/persistence/) supports error recovery, extending with human-in-the-loop workflows, and easier incorporation into conversational applications.\n", + "- The LangGraph implementation is straightforward to modify and extend, as we will see below.\n", + "\n", + "### Map\n", + "Let's first define the prompt associated with the map step, and associated it with the LLM via a [chain](/docs/how_to/sequence/). 
We can use the same summarization prompt as in the `stuff` approach, above:" ] }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 8, "id": "a1e6773c", "metadata": {}, "outputs": [], "source": [ - "from langchain.chains import MapReduceDocumentsChain, ReduceDocumentsChain\n", - "from langchain_text_splitters import CharacterTextSplitter\n", + "from langchain_core.output_parsers import StrOutputParser\n", + "from langchain_core.prompts import ChatPromptTemplate\n", "\n", - "llm = ChatOpenAI(temperature=0)\n", + "map_prompt = ChatPromptTemplate.from_messages(\n", + " [(\"system\", \"Write a concise summary of the following:\\\\n\\\\n{context}\")]\n", + ")\n", "\n", - "# Map\n", - "map_template = \"\"\"The following is a set of documents\n", - "{docs}\n", - "Based on this list of docs, please identify the main themes \n", - "Helpful Answer:\"\"\"\n", - "map_prompt = PromptTemplate.from_template(map_template)\n", - "map_chain = LLMChain(llm=llm, prompt=map_prompt)" + "map_chain = map_prompt | llm | StrOutputParser()" ] }, { @@ -330,15 +387,14 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 10, "id": "ce48b805-d98b-4e0f-8b9e-3b3e72cad3d3", "metadata": {}, "outputs": [], "source": [ "from langchain import hub\n", "\n", - "map_prompt = hub.pull(\"rlm/map-prompt\")\n", - "map_chain = LLMChain(llm=llm, prompt=map_prompt)" + "map_prompt = hub.pull(\"rlm/map-prompt\")" ] }, { @@ -346,96 +402,49 @@ "id": "bee3c331", "metadata": {}, "source": [ - "The `ReduceDocumentsChain` handles taking the document mapping results and reducing them into a single output. It wraps a generic `CombineDocumentsChain` (like `StuffDocumentsChain`) but adds the ability to collapse documents before passing it to the `CombineDocumentsChain` if their cumulative size exceeds `token_max`. In this example, we can actually re-use our chain for combining our docs to also collapse our docs.\n", + "### Reduce\n", "\n", - "So if the cumulative number of tokens in our mapped documents exceeds 4000 tokens, then we'll recursively pass in the documents in batches of < 4000 tokens to our `StuffDocumentsChain` to create batched summaries. And once those batched summaries are cumulatively less than 4000 tokens, we'll pass them all one last time to the `StuffDocumentsChain` to create the final summary." + "We also define a chain that takes the document mapping results and reduces them into a single output." ] }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 11, "id": "6a718890-99ab-439a-8f79-b9ae9c58ad24", "metadata": {}, "outputs": [], "source": [ - "# Reduce\n", - "reduce_template = \"\"\"The following is set of summaries:\n", + "# Also available via the hub: `hub.pull(\"rlm/reduce-prompt\")`\n", + "reduce_template = \"\"\"\n", + "The following is a set of summaries:\n", "{docs}\n", - "Take these and distill it into a final, consolidated summary of the main themes. 
\n", - "Helpful Answer:\"\"\"\n", - "reduce_prompt = PromptTemplate.from_template(reduce_template)" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "id": "f189184a-673e-4530-8a6b-57b091045d87", - "metadata": {}, - "outputs": [], - "source": [ - "# Note we can also get this from the prompt hub, as noted above\n", - "reduce_prompt = hub.pull(\"rlm/reduce-prompt\")" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "id": "c9d1da97-d590-4a96-82b2-8002d27fd7f6", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "ChatPromptTemplate(input_variables=['docs'], metadata={'lc_hub_owner': 'rlm', 'lc_hub_repo': 'map-prompt', 'lc_hub_commit_hash': 'de4fba345f211a462584fc25b7077e69c1ba6cdcf4e21b7ec9abe457ddb16c87'}, messages=[HumanMessagePromptTemplate(prompt=PromptTemplate(input_variables=['docs'], template='The following is a set of documents:\\n{docs}\\nBased on this list of docs, please identify the main themes \\nHelpful Answer:'))])" - ] - }, - "execution_count": 9, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "reduce_prompt" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "id": "1edb1b0d", - "metadata": {}, - "outputs": [], - "source": [ - "# Run chain\n", - "reduce_chain = LLMChain(llm=llm, prompt=reduce_prompt)\n", + "Take these and distill it into a final, consolidated summary\n", + "of the main themes.\n", + "\"\"\"\n", "\n", - "# Takes a list of documents, combines them into a single string, and passes this to an LLMChain\n", - "combine_documents_chain = StuffDocumentsChain(\n", - " llm_chain=reduce_chain, document_variable_name=\"docs\"\n", - ")\n", + "reduce_prompt = ChatPromptTemplate([(\"human\", reduce_template)])\n", "\n", - "# Combines and iteratively reduces the mapped documents\n", - "reduce_documents_chain = ReduceDocumentsChain(\n", - " # This is final chain that is called.\n", - " combine_documents_chain=combine_documents_chain,\n", - " # If documents exceed context for `StuffDocumentsChain`\n", - " collapse_documents_chain=combine_documents_chain,\n", - " # The maximum number of tokens to group documents into.\n", - " token_max=4000,\n", - ")" + "reduce_chain = reduce_prompt | llm | StrOutputParser()" ] }, { "cell_type": "markdown", - "id": "fdb5ae1a", + "id": "3d7df564-415a-49e2-80b6-743446b40be5", "metadata": {}, "source": [ - "Combining our map and reduce chains into one:" + "### Orchestration via LangGraph\n", + "\n", + "Below we implement a simple application that maps the summarization step on a list of documents, then reduces them using the above prompts.\n", + "\n", + "Map-reduce flows are particularly useful when texts are long compared to the context window of a LLM. For long texts, we need a mechanism that ensures that the context to be summarized in the reduce step does not exceed a model's context window size. Here we implement a recursive \"collapsing\" of the summaries: the inputs are partitioned based on a token limit, and summaries are generated of the partitions. 
This step is repeated until the total length of the summaries is within a desired limit, allowing for the summarization of arbitrary-length text.\n", + "\n", + "First we chunk the blog post into smaller \"sub documents\" to be mapped:" ] }, { "cell_type": "code", "execution_count": 12, - "id": "22f1cdc2", + "id": "7821efb9-e1de-4234-84d2-75dfe13b5a6c", "metadata": {}, "outputs": [ { @@ -444,242 +453,287 @@ "text": [ "Created a chunk of size 1003, which is longer than the specified 1000\n" ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Generated 14 documents.\n" + ] } ], "source": [ - "# Combining documents by mapping a chain over them, then combining results\n", - "map_reduce_chain = MapReduceDocumentsChain(\n", - " # Map chain\n", - " llm_chain=map_chain,\n", - " # Reduce chain\n", - " reduce_documents_chain=reduce_documents_chain,\n", - " # The variable name in the llm_chain to put the documents in\n", - " document_variable_name=\"docs\",\n", - " # Return the results of the map steps in the output\n", - " return_intermediate_steps=False,\n", - ")\n", + "from langchain_text_splitters import CharacterTextSplitter\n", "\n", "text_splitter = CharacterTextSplitter.from_tiktoken_encoder(\n", " chunk_size=1000, chunk_overlap=0\n", ")\n", - "split_docs = text_splitter.split_documents(docs)" + "split_docs = text_splitter.split_documents(docs)\n", + "print(f\"Generated {len(split_docs)} documents.\")" ] }, { - "cell_type": "code", - "execution_count": 16, - "id": "d7e53f93-c5aa-456a-85f4-a6b3301a34ed", + "cell_type": "markdown", + "id": "3e7f1c8a-070e-47f0-bcf2-16d6191051ac", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "The main themes identified in the list of documents provided are related to large language models (LLMs), autonomous agents, prompting, steering language models, natural language processing (NLP), the use of tools to augment language models, reinforcement learning, reasoning, acting, self-reflection, and the integration of language models with external knowledge sources.\n" - ] - } - ], "source": [ - "result = map_reduce_chain.invoke(split_docs)\n", - "\n", - "print(result[\"output_text\"])" + "Next, we define our graph. Note that we define an artificially low maximum token length of 1,000 tokens to illustrate the \"collapsing\" step." 
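To see why collapsing will kick in with this limit, it can help to compare the chunk sizes against it. A small sketch, assuming the `llm` and `split_docs` objects defined above:

```python
# Small sketch, assuming the `llm` and `split_docs` objects defined above.
# With a 1,000-token cap, the combined summaries of 14 chunks will likely
# exceed the limit on the first pass, triggering at least one collapse round.
token_counts = [llm.get_num_tokens(doc.page_content) for doc in split_docs]
print(f"largest chunk: {max(token_counts)} tokens")
print(f"total across chunks: {sum(token_counts)} tokens")
```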
] }, { - "cell_type": "markdown", - "id": "e62c21cf", + "cell_type": "code", + "execution_count": 13, + "id": "10ced55c-9e3e-404f-abe9-83ac29ffaa5a", "metadata": {}, + "outputs": [], "source": [ - "If we follow the [Langsmith Trace](https://smith.langchain.com/public/3a1a6d51-68e5-4805-8d90-78920ce60a51/r), we can see the the individual LLM summarizations, including the [final call](https://smith.langchain.com/public/69482813-f0b7-46b0-a99f-86d56fc9644a/r) that summarizes the summaries.\n", + "import operator\n", + "from typing import Annotated, List, Literal, TypedDict\n", "\n", - "### Go deeper\n", - " \n", - "**Customization** \n", + "from langchain.chains.combine_documents.reduce import (\n", + " acollapse_docs,\n", + " split_list_of_docs,\n", + ")\n", + "from langchain_core.documents import Document\n", + "from langgraph.constants import Send\n", + "from langgraph.graph import END, START, StateGraph\n", "\n", - "* As shown above, you can customize the LLMs and prompts for map and reduce stages.\n", + "token_max = 1000\n", "\n", - "**Real-world use-case**\n", "\n", - "* See [this blog post](https://blog.langchain.dev/llms-to-improve-documentation/) case-study on analyzing user interactions (questions about LangChain documentation)! \n", - "* The blog post and associated [repo](https://github.com/mendableai/QA_clustering) also introduce clustering as a means of summarization.\n", - "* This opens up another path beyond the `stuff` or `map-reduce` approaches that is worth considering.\n", + "def length_function(documents: List[Document]) -> int:\n", + " \"\"\"Get number of tokens for input contents.\"\"\"\n", + " return sum(llm.get_num_tokens(doc.page_content) for doc in documents)\n", "\n", - "![Image description](../../static/img/summarization_use_case_3.png)" + "\n", + "# This will be the overall state of the main graph.\n", + "# It will contain the input document contents, corresponding\n", + "# summaries, and a final summary.\n", + "class OverallState(TypedDict):\n", + " # Notice here we use the operator.add\n", + " # This is because we want combine all the summaries we generate\n", + " # from individual nodes back into one list - this is essentially\n", + " # the \"reduce\" part\n", + " contents: List[str]\n", + " summaries: Annotated[list, operator.add]\n", + " collapsed_summaries: List[Document]\n", + " final_summary: str\n", + "\n", + "\n", + "# This will be the state of the node that we will \"map\" all\n", + "# documents to in order to generate summaries\n", + "class SummaryState(TypedDict):\n", + " content: str\n", + "\n", + "\n", + "# Here we generate a summary, given a document\n", + "async def generate_summary(state: SummaryState):\n", + " response = await map_chain.ainvoke(state[\"content\"])\n", + " return {\"summaries\": [response]}\n", + "\n", + "\n", + "# Here we define the logic to map out over the documents\n", + "# We will use this an edge in the graph\n", + "def map_summaries(state: OverallState):\n", + " # We will return a list of `Send` objects\n", + " # Each `Send` object consists of the name of a node in the graph\n", + " # as well as the state to send to that node\n", + " return [\n", + " Send(\"generate_summary\", {\"content\": content}) for content in state[\"contents\"]\n", + " ]\n", + "\n", + "\n", + "def collect_summaries(state: OverallState):\n", + " return {\n", + " \"collapsed_summaries\": [Document(summary) for summary in state[\"summaries\"]]\n", + " }\n", + "\n", + "\n", + "# Add node to collapse summaries\n", + "async def collapse_summaries(state: 
OverallState):\n", + " doc_lists = split_list_of_docs(\n", + " state[\"collapsed_summaries\"], length_function, token_max\n", + " )\n", + " results = []\n", + " for doc_list in doc_lists:\n", + " results.append(await acollapse_docs(doc_list, reduce_chain.ainvoke))\n", + "\n", + " return {\"collapsed_summaries\": results}\n", + "\n", + "\n", + "# This represents a conditional edge in the graph that determines\n", + "# if we should collapse the summaries or not\n", + "def should_collapse(\n", + " state: OverallState,\n", + ") -> Literal[\"collapse_summaries\", \"generate_final_summary\"]:\n", + " num_tokens = length_function(state[\"collapsed_summaries\"])\n", + " if num_tokens > token_max:\n", + " return \"collapse_summaries\"\n", + " else:\n", + " return \"generate_final_summary\"\n", + "\n", + "\n", + "# Here we will generate the final summary\n", + "async def generate_final_summary(state: OverallState):\n", + " response = await reduce_chain.ainvoke(state[\"collapsed_summaries\"])\n", + " return {\"final_summary\": response}\n", + "\n", + "\n", + "# Construct the graph\n", + "# Nodes:\n", + "graph = StateGraph(OverallState)\n", + "graph.add_node(\"generate_summary\", generate_summary) # same as before\n", + "graph.add_node(\"collect_summaries\", collect_summaries)\n", + "graph.add_node(\"collapse_summaries\", collapse_summaries)\n", + "graph.add_node(\"generate_final_summary\", generate_final_summary)\n", + "\n", + "# Edges:\n", + "graph.add_conditional_edges(START, map_summaries, [\"generate_summary\"])\n", + "graph.add_edge(\"generate_summary\", \"collect_summaries\")\n", + "graph.add_conditional_edges(\"collect_summaries\", should_collapse)\n", + "graph.add_conditional_edges(\"collapse_summaries\", should_collapse)\n", + "graph.add_edge(\"generate_final_summary\", END)\n", + "\n", + "app = graph.compile()" ] }, { "cell_type": "markdown", - "id": "f08ff365", + "id": "f00af5d5-bfac-4c13-9439-aa0b18ac3b44", "metadata": {}, "source": [ - "## Option 3. Refine {#refine}\n", - " \n", - "[RefineDocumentsChain](https://api.python.langchain.com/en/latest/chains/langchain.chains.combine_documents.refine.RefineDocumentsChain.html) is similar to map-reduce:\n", - "\n", - "> The refine documents chain constructs a response by looping over the input documents and iteratively updating its answer. For each document, it passes all non-document inputs, the current document, and the latest intermediate answer to an LLM chain to get a new answer.\n", - "\n", - "This can be easily run with the `chain_type=\"refine\"` specified." + "LangGraph allows the graph structure to be plotted to help visualize its function:" ] }, { "cell_type": "code", - "execution_count": 21, - "id": "de1dc10e", + "execution_count": 14, + "id": "0c8d41e4-664d-46f4-94e9-248971d428a6", "metadata": {}, "outputs": [ { - "name": "stdout", - "output_type": "stream", - "text": [ - "The existing summary provides detailed instructions for implementing a project's architecture through code, focusing on creating core classes, functions, and methods in different files following best practices for the chosen language and framework. Assumptions about the model, view, and controller components are also outlined. The additional context highlights challenges in long-term planning and task decomposition, as well as the reliability issues with natural language interfaces in LLM-powered autonomous agents. 
These insights shed light on the limitations and potential pitfalls of using LLMs in agent systems, with references to recent research on LLM-powered autonomous agents and related technologies.\n" - ] + "data": { + "image/jpeg": "/9j/4AAQSkZJRgABAQAAAQABAAD/4gHYSUNDX1BST0ZJTEUAAQEAAAHIAAAAAAQwAABtbnRyUkdCIFhZWiAH4AABAAEAAAAAAABhY3NwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAA9tYAAQAAAADTLQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAlkZXNjAAAA8AAAACRyWFlaAAABFAAAABRnWFlaAAABKAAAABRiWFlaAAABPAAAABR3dHB0AAABUAAAABRyVFJDAAABZAAAAChnVFJDAAABZAAAAChiVFJDAAABZAAAAChjcHJ0AAABjAAAADxtbHVjAAAAAAAAAAEAAAAMZW5VUwAAAAgAAAAcAHMAUgBHAEJYWVogAAAAAAAAb6IAADj1AAADkFhZWiAAAAAAAABimQAAt4UAABjaWFlaIAAAAAAAACSgAAAPhAAAts9YWVogAAAAAAAA9tYAAQAAAADTLXBhcmEAAAAAAAQAAAACZmYAAPKnAAANWQAAE9AAAApbAAAAAAAAAABtbHVjAAAAAAAAAAEAAAAMZW5VUwAAACAAAAAcAEcAbwBvAGcAbABlACAASQBuAGMALgAgADIAMAAxADb/2wBDAAMCAgMCAgMDAwMEAwMEBQgFBQQEBQoHBwYIDAoMDAsKCwsNDhIQDQ4RDgsLEBYQERMUFRUVDA8XGBYUGBIUFRT/2wBDAQMEBAUEBQkFBQkUDQsNFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBT/wAARCAHXARsDASIAAhEBAxEB/8QAHQABAAMAAwEBAQAAAAAAAAAAAAUGBwMECAECCf/EAFcQAAEEAQIDAggHCgoJAwMFAAEAAgMEBQYRBxIhEzEIFBYiQVFWlBUXVZOV0dMyQlJTVGGBs9LUCSM3OHF1dpKhtDM0NmJygpGxsiQ1dCZEw3ODheHw/8QAGgEBAQEBAQEBAAAAAAAAAAAAAAECBAMFB//EADMRAQABAgIHBQcFAQEAAAAAAAABAhEDkRIUIVFSYdEEEzFToSNBcbHB0uEVM4Gi8EIy/9oADAMBAAIRAxEAPwD+qaIiAiIgIiICIuhmsxFhaYmfHJYle9sUNaAAyTSOPRjQSB6ySSA0AuJABIsRNU2gd9R02o8TXeWS5SlE8fevsMB/xKifI92dHballGQc4f8At0TnClEN/ueXp2p9Bc/v6kNYDyqRj0jgoW8seFxzG777Nqxgb/8ARe+jhU7Kpmfh/vo1sffKrCfLFD3pn1p5VYT5Yoe9M+tffJbC/JFD3Zn1J5LYX5Ioe7M+pPY8/Q2PnlVhPlih70z608qsJ8sUPemfWvvkthfkih7sz6k8lsL8kUPdmfUnsefobHzyqwnyxQ96Z9aeVWE+WKHvTPrX3yWwvyRQ92Z9SeS2F+SKHuzPqT2PP0Nj9RakxE7w2LKUpHH71lhhP/dSSiZNJYKZhZJhce9h6lrqsZH/AGUb5Eswn8dpmb4Hkb18RBJpS/7pi7o/+KPlI6b8wHKWjhVbImY+Ph/v4TYtCKOwmZZma0jjDJVswvMVirLtzwvHoO3QggggjoQQR3qRXjVTNM2lBERZBERAREQEREBERAREQEREBERAREQEREBERAVYrbZfX9x79nQ4etHFC0+iabd0jvVvyNiAPeOZ46bnezqsYUeJ651JXfuDajrXozt0cOQxOAPrBiG//EPWujC8K599vrEfK6x71nRdTK5ajgsbZyGSuV8fQrMMs9q1K2KKJg73Oe4gNA9ZKpQ8IThYe7iXo8//AM9V+0XOi/Pe2NjnuIa1o3JPoCxat4SsWqOHGpNVaa0hqSanRxU+Sxt29Sjjq5FrNwHRntgeXccxa/kcWgkDdW6vx84ZXJ469biLpOzZlcI4oYs5Vc+RxOwa0CTqSdgAse0Dwo1jNntXVa+lH8M9H5nT9unYwUmYjv035OZ2zbFWOMnsWBpfzbBnNu3zNxugv+h+N+Vy3BrB6tyehdTz5K3BVacfj6kEstt8kDXmeFrZy1sBJOxkcwj0gdN/tnwn9K0eHdrV9rH5ytXpZiPBX8ZJSHj9K2+RjOSSIO67dox3mF27XDl5j0Wb3NHcSc9wd0FpvKaEtNraYnpVczga+crM+H6sVZ8RMcjZABGJBFIYpSzmA2Pd1isHwK1fQ0tqLFVtEVdP1bmvsPqSljqV6u+GGkx9Xtm/dNAfGIHFzQNiXbML+9Bf9aeEVqTA624e42pw41IaudkvizRmip+OyCGEuYIv/Vhjeuz3c5Hmjp16LemO5mNcWlpI35T3hZLxr01qZ+tOHOsdNYPymk03cueNYmO3FWmlisVnRc7HylrN2O5SQSNweinDx84d0ia+W11pbD5SL+Lt461naglqzDo+J47T7prt2n84KDQEVBf4QPC6JwD+JOkGEgO2dnao6Ebg/wCk9IIKuWIzFDUGMr5HF3q2Sx9lnaQW6crZYpW/hNe0kOH5wUEJktsRrrEWWbNZlo5KE46+fJGx00TvV0a2cfn5h6lZ1WNRt8c1bpOqwEugnnyD9huAxkD4ep9HnWG/07H86s66MX/zRPL6z9Fn3CIi50EREBERAREQEREBERAREQEREBERAREQEREBQuoMTPZmp5LHiP4VolwiEri1ksT9u0icR3B3K0g9dnMYdiAQZpFqmqaJvB4IzEZylqGCQRbtmj82xTsN5ZoHfgyM9Hcdj3EdQSCCu18G1PyWD5sfUulmtLYvPyRy3K29mNpbHbgkdDPGCdyGysIe0b7HYHboFHO0PICez1LnYm778otMd/i5hP8AivbRwqtsVW+PX8LsT4x1RpBFaEEdQRGF2FVvIif2pz3z8X2SeRE/tTnvn4vsk7vD4/SVtG9aUVF1LojOeTmV+AtU5b4b8Ul8R8bnj7HxjkPZ8+0W/Lzcu+3o3XX0ZojUnklh/KfVOT8ovFI/hHxCePxfxjlHadnvFvy82+2/oTu8Pj9JLRvaEuu7H1XuLnVoXOJ3JMY3Kr3kRP7U575+L7JPIif2pz3z8X2Sd3h8fpJaN6wfBtT8lg+bH1Lq5fOUNOVojYkbG6Q8letEN5Z3fgRsHVx/MO7vOwBKihoiQjaTUudkbvvsbLG/4tYD/ipDC6TxeBmknq13OtyDlfbsyvnnePUZHku2/Nvt+ZNHCp2zVf4R9Z6SbHHgMVYbbtZfJMYzJW2tj7JjuZteFpJZGD6T5xLiO8n1AKcRF5V1TXN5SdoiIsIIiICIiAiIg
IiICIiAiIgIiICIiAiIgIiICIiAiIgIiIK7xGrY65w91RBl70mMxMuLtMuXofu68JicJJG9D1a3cjoe7uXR4P08Pj+FWkaun8nNmsHDi67KORsb9pZhEYDJHbgdXDY9w7+5SOv7MNPQmpLFjEnPQRY2zJJimt5jdaInEwAbHfnHm7bH7ruK6fCm5XyPDPS1qpgHaWrTY2vJFhHs5DQaYwRCW7Dbk+522Hd3ILWiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiIInVseWm0rmY8BLFBnXUpm4+WcbxssFh7Iu3B80P5Seh6ehdfQUOfr6JwUWq54LOpmUom5KaqAIn2OUdoWbADYu326D+hcfEatjrnD3VEGXvSYzEy4u0y5eh+7rwmJwkkb0PVrdyOh7u5dHg/Tw+P4VaRq6fyc2awcOLrso5Gxv2lmERgMkduB1cNj3Dv7kFwREQEREBERAREQEREBERAREQEREBERAREQEREBERARFD6h1CMKK8MMBuZC04tgrB3ICBtzPc7Y8rGgjc7HvAAJIB1TTNc6NPiJhFSTnNXk7ihhAPUbUx2/T2fVfPhzWH5Dg/epvs11arXvjOFsu6KkfDmsPyHB+9TfZp8Oaw/IcH71N9mmq174zgsu6KkfDmsPyHB+9TfZp8Oaw/IcH71N9mmq174zgsu6KkfDmsPyHB+9TfZp8Oaw/IcH71N9mmq174zgs8Hfwo3Ax2N1Di+KOMrk1skGY7Llo35Z2N2hkP8AxRt5N+4dk30uUZ/BdcFJM/r7JcSrsbmUMCx9LHu6gSW5Yy2Qg+kMieQQfxzT6F7Y4r6RzfF/h5nNH5rH4TxDKVzEZG2ZS6F4IdHI3ePbmY8NcN+m7eq6vBjQeb4I8NsLo7DU8LLVx8ZD7MliUPsSuJdJI7aPvc4np12Gw7gmq174zgs21FSPhzWH5Dg/epvs0+HNYfkOD96m+zTVa98ZwWXdFSPhzWH5Dg/epvs0+HNYfkOD96m+zTVa98ZwWXdFSPhzWH5Dg/epvs0+HNYfkOD96m+zTVa98ZwWXdFSPhzWH5Dg/epvs0+HNYD/AOxwZ/N41MP/AMaarXvjOCy7ooTTuo3Zh1irareI5OsGmauH9owtdvyvY/YczTykb7AggggembXNXRVROjV4oIiLAIiICIiAiIgIiICIiAiIgKlaiO/EbCj0DFXdvzfx1X/+v+iuqpOov5R8N/VNz9dWXZ2X9z+J+UtQk0WTceNcZbTF7RGHx+bh0nV1BlH07mo54Y5BTayCSVrGiUGMPlcwMaXggdehOyyOnxw13W0XXoVcna1Rm83rK9g8dnaVKoTLRrxFxmrROdFC5x7JwHO8t5jIRzANavSaoibMvWqLyzkeIPGDTuCfWyBv4sWdQ4bH4zN5/H0PGZWWZzFYjlhrSvjIZ5hDm8hPMR023Xc1fxl1hwfHErEXMm7WF7FV8RPhrlqpBDKH3p31yyVsXZxuDHsDh9zvvyl3pTSgemkXmevrLi9prF6ss5KDOy4mvprIXWZXUFDF15aV6KIvh7NtWaRsjHedu17NwWt85wJUrNqTVen+DGGz+e4iXxn9Sx49tOHGYKrYeyxIwvNerDyDne8Hq6Vzmt7Mu2aNwGkPQMkjIY3SSOaxjRu5zjsAPWSv0vGevNZ6u1t4N/FzD6kv5CrlNNZKtXNi1SqwWrNeQV5WMnjiMkTXDtfuoyNw1vd5wOkcTdZ630jn9JcPcJlc7nsxdp28nezlOhjn5B0McjGsYyOUw1h1lALuUkBo80klwaQ9CIs74I5XW2T03kGa5x1mnerX3xU7FyOvFPbq8rHMkljgkkjY/mL2kNdseQHYb7Lq+EBmNY4PSWOs6Q8cjAyUTctaxlJl27Wo8r+0kggeC2Rwd2e42ceUuIaSrfZcaciw7RHEy/nOIPD3HUdWs1Vp/K6dyV6e+ylHB43NDYrsY9zQ0GNzA97HMHKN992gjYVKrxP11qPUmGwdXU3wYclrvUGDfbbQglfFTqxzPiYwOZtzNEYAc4Hr1dz9xmlA9PIvJ8/EHiXp7RmuNT2dc/CTdFanGH8RkxNaNmTriWvzOnc1u7ZOWxsDFyAcg3B3O3e4i8QuIVOrxqzuK1h8G1dD3I3UMb8GV5Y52eJ15nxzPc3mLSXu25S1wLju4jlDWkPUSLA4ddak0DrrKYbVOtY72Jm0dY1GMraxsMXwZLDKyN/KyIN549pQ4MdzO8zbmO6qejOMGu6eos5isnkM1kKNrSN3P4vIZ/DVKE7JYXMAdHHC47xkSg8szQ8Fo33BKaUD1Qi82VNW8QtM8E9I8TsxrGXMRSR4rJ5rGMx1aOBlCVgFgsLY+fna2Zkrjzbbwu5Q1ruVaZwv1bldcav19fdcEmlqOSZh8TXbGwAyQMHjU3OBzODpXlg3JA7HoBud7FVxc8OduJUw9eIbv+f+OO3/AHP/AFV3VHw/8pc39UD9cVeF5dq/9x8IakREXGyIiICIiAiIgIiICIiAiIgKk6i/lHw39U3P11ZXZVbVuLtNyePzdOu646pFLWnqx7do6KQscXM373NdG3zdxuC7bchoPV2aYjE27p+UrDK/CQ0Tktc6RxlTG4nKZowZBtiWrisjUqyFoY8AltuN8MoBIIa8DY7OBBaFAaH4NZ3WXDiTC8Rpb9OSllW3dOzQ264ymKYxjRG4zVo2xdoHGXblaRyuAO/o1t2sYGHZ2KzoO3UDDWjt+kR7L55Z1/krPfQlv7NdncVzN9GV0ZVb4j6NrBVMbldTakzrq+aqZ1tzJ3I5JjNXex8bOkYY2MmMbtY1u+5O4J3XZ1LwR0zrDJ6rt5mKxfj1Ljq2MvVHyARCOB8j43R7AOa8OlJ5uY7FrSNtutg8s6/yVnvoS39mnlnX+Ss99CW/s1e4r4TRncq2K4JQUtP5/EZDWGq9RVsxjpMW92YyDJXV4Xtc0mMCNrefZx89wc47Dcld3UnB7Eak0bp7T7r2Sx50++vNjMpRmYy3WlhjMbJA4sLCSxzmkFhaQ49FOeWdf5Kz30Jb+zTyzr/JWe+hLf2adxXwyaM7lKpeDnpqDCa0xVy/mczW1fHGMq7I3BJI+VjC3tmODQWvI5Og80dmzla0Ag/cl4PuPy+Nwrbmq9UTZ7DTSS0NTeOxNyUDZGhskXOIgx0bg0btcw77b96unlnX+Ss99CW/s08s6/yVnvoS39mncV8MmjO5ANxmrNBYehi9NVG6zY0ySWMhqjUEkFkvc/m721pA4dT0AaGgAAbd3Xuaf1dxFx5qahdNoF1Wdlird0jnzYmldyva5kglqMbybOB5SHAnY9C0FT+Q4hY3FULN27TzNSnWidNPYnw9pkcUbQS5znGPYAAEknuAX4xPEnE57GVcjja2XyGPtxNmr2q2IsyRTRuG7XNcIyHAjqCE7jE4ZTRlUofBw09jcVpqvh8tnMFfwJteL5elaYbcosv57ImMkb2P7R+zju3oQOXlXNpXwd9OaRt4KzUyGYsS4fM3s5A65ZbK6Se3E+OUSOLOZzQJHEdebfYl
zuu9y8s6/wAlZ76Et/Zp5Z1/krPfQlv7NO4r4V0Z3KvkuBGAymlNY6fluZJtLVOWOYuyMljEkcxMJ5YyWbBn8Qzo4OPV3Xu2/eZ4HYHOYfiFjZ7eRZBrd4kyLo5Iw6I9hHB/E7sIb5sbT5wd1J9HRWXyzr/JWe+hLf2aeWdf5Kz30Jb+zTuK+E0Z3IHVXBbTutMzPkMt41ZFjT9jTUtXtA2J9WaSN73dG8wkBibs4OAHXpvsRA0fBvxMGUjydzVOqMzkWYuzhjZyN2KQupzMDTEWiINHKWh4cAHFwHMXDor55Z1/krPfQlv7NPLOv8lZ76Et/Zp3FfCmjO5WdWaVt6a4LN0dprBv1U2PFswUVW7bjg5oOx7HtJpCACA0Au5W7nc7NUnwc4dw8J+GGnNJxPbM7GVGxzzM32lnO7pZBv186Rz3dfWpPyzr/JWe+hLf2aeWVc92KzxP9S2h/wDjTuMTx0ZXRnc7WH/lLm/qgfrirwqppXG2rOYtZ23WkoiWuyrXrTbdqGBznOe8DflLiRs3fcBoJ2JLRa1x9pmJrtHuiEkREXIgiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiIKNx1/kR4hf2dyP8AlpFEeC7/ADcOGX9naP6lql+Ov8iPEL+zuR/y0iiPBd/m4cMv7O0f1LUGoIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiCjcdf5EeIX9ncj/AJaRRHgu/wA3Dhl/Z2j+papfjr/IjxC/s7kf8tIojwXf5uHDL+ztH9S1BqCIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAi432Io3cr5WMPqc4BfnxyD8fH/AHwraRzIuHxyD8fH/fCeOQfj4/74S0jmRcPjkH4+P++E8cg/Hx/3wlpHMi4fHIPx8f8AfCeOQfj4/wC+EtI8Y+Gp4ZGR4RZPUvDa3oA3KmbwskdPOfC3Zh8c8Lo3P7LsD1Y/nHLz9eUHcc3SM8BzwxrvEG5ozhJV0E+OviMOIbeeblecRxV4eUSmHsRsHydmzbn6GQdTt1vn8IXwVg4t8Fpc3jgyXUWlee/AGEF0tcgeMR/3Wh49O8ew+6UT/BwcE4eGnCN+rcoyOLPaq5Z2CQgPhpN/0Le/pz7mQ7d4czfq1LSPYKLh8cg/Hx/3wnjkH4+P++EtI5kXD45B+Pj/AL4TxyD8fH/fCWkcyLh8cg/Hx/3wnjkH4+P++EtI5kXD45B+Pj/vhfplmKRwa2VjnH0BwJS0jkREUBERAREQEREBERAREQEREBERAVa15kbFHF1IK0zq0l+5FUM0Z2exrty4tOx2dytIB9BO6sqqHEb/AEWnv63h/wDCRdPZoicWmJWPFFt4faYA87T2Mld6XzVGSPcfWXOBJP5yd19+L7S3s3iPcIv2VLZLJVMNjrN+/Zip0qsbpp7E7wyOJjRu5znHoAACSSqPp/j9oLU1HJ3aOeDaWNq+O2bVypPVibB+Na+VjQ9n+80kFd/f4nHOZed6xfF9pb2bxHuEX7KfF9pb2bxHuEX7KplnwgNN53Q+ssppLJR38tgcRPkxSv1J6ziGxPfG8xytje6NxZtzN6H0FceL4rZa7q7hZipK9IV9U6ftZW65rH88csUdVzWxHm2Dd537hwcejeo67zWMTjnMvO9d/i+0t7N4j3CL9lPi+0t7N4j3CL9lV6Dj3oKzqluno9QxOyT7Rosd2EwrPsAkGFtgs7J0m4I5A8ncbbb9FyXuOmh8frA6XkzfaZptiOpJDWqTzRwzPIDI5JWMMcbySPNc4HqE7/E45zLzvTvxfaW9m8R7hF+ynxfaW9m8R7hF+yqbwy49YviPrLVmnIqV6nbw2SlpQvfRsiOeOOOJzpHSuibHG7mkcBGXcxDQ4bhwK02zZhpVpbFiVkEETDJJLI4NaxoG5JJ7gB6VYx8SfCucy870J8X2lvZvEe4Rfsp8X2lvZvEe4RfsqrY3wieH2W09lc5Vzr34nGNhfZtOx9pjQyZ/ZxPYHRgyNc7oHMDh0J32Vh1HxK03pLI2qOWyPilqriLGdmj7CR/LSgLRNLu1pB5S9vmjzjv0BTv8TjnMvO92Pi+0t7N4j3CL9lPi+0t7N4j3CL9lUxvhO8Nn2hWj1BLLZki7evDFi7b33I/w64ERNhvp3i5gACe4KTt8etB0sLgcs7Pslo55spxjq1aad9sx7c7GMYwuLwTtybc24I23B2nf4nHOZed6wfF9pb2bxHuEX7KfF9pb2bxHuEX7KqGI8JXhvnbVCClqQSuu2RSje6lZZGywXFogle6MNhlJGwjkLXHcbDqFI5TjtobCaybpW/nPFc0bEdTklqTiETSAGOMz8nZB7g5uzS/c7j1p3+JxzmXnenvi+0t7N4j3CL9lPi+0t7N4j3CL9lQ1/jXo7HaysaTkyc0uoq8sMU1Ctj7M74jK1ro3OMcbg1hD27vJ5RvsSCoLSnHjBWtD5PVebzlBmIjzNjHVJKtK3DKQ1/LHA6CVgldY23DmsYRuDsNgU7/E45zLzvXb4vtLezeI9wi/ZTyA0wAeXTuKYT98ynG0jruNiBuOoBVbHhA6AOlp9QHULG46C2yhIx1acWW2XdWQmsWdtzuHUN5NyOoGyt2ltUYzWeCq5nD2HWcfZ5uzkfC+JxLXFjgWPAc0hzSCCAeivf4k/wDc5l53pHQd6eerlKE877Rxl11Rk0ri+RzOzjlaHuPVxAlDeY7k8oJJJJNnVO4e/wCv6w/rgf5OqriuDtMRGLNuXrEE+IiIuZBERAREQEREBERAREQEREBVDiN/otPf1vD/AOEit6qPEVhNbBP+8jy0BcfVuHNH+LgP0rq7N+9SseKgeEfozK8QOCmp8HhIW2snYiikiqveGCz2c0crodz0HaNY5nXp53XoqHxP1DkOOHCjM4XB6I1RQvVPE8i6jm8YaUdrsLUUr6jXPOz3ObG4Dl3YenndV6GRe8xdHmfUuPznG7WefzOH0xmsHQg0Pk8GJM9SdRlu27RaY4Wsk2JYzkJL/ud3dCe9cmFhzc2W4EZx2mc/Tr1MLe09kGvouFjG2JGVo2SSx97Yuau89p9ztyu7iF6URTRHkDg9wyoUsLpzQms9KcRJc3jLbWTyMyF9+Bc6KUyRWmu7YQchLWP5QOYOP3PTdX3g9nMlwkdk9FZnRupbeSn1DctR5nHY11indis2XSNsyTg8rC1jwHteQ4CPoD0C9BIkU2GLcLZ7+juLPETAZLA5hrc9nnZihloqL3498LqcLSHTjzWPDoXN5XbEkt233Wt52GCxhMhFaqOyFZ9eRstRjeZ07C0gsA6blw3G3518z2AxmqcTYxeYoVspjbAAmqW4hJFIAQ4czT0OxAP6FVsRwM4dYDJ1sjjdDafoX6zxLBZrY2JkkTx3Oa4N3B/OFbTGwecJdPax1Bwx13o/TOG1VNomriac2Go6ro+LXq9mKy176UDnbOmjEUY5S7m2OzQ4hTvEi3l+J2sNU5LFaQ1PXx54Y5vGwzZDETV3T25HwlsDGOHMXnb
oNvO68vMASvVSLOiMNxunMpHxO4K2nYu42rj9K361uc13hlaV0dINjkdtsxx5H7NOxPK71FUrhpozPY/WHDKWzgsjWr0dSatmnfLUkYyvFM+YwvcSNmtfzDlJ2DtxtvuvU6K6I8sZ3Rmel4K8TqcWCyL79riJ4/VrspyGWaD4Wqv7aNu27mcjXO5x05QTvsCq/wAbMbrHVcevKeSxGustmoMzDNhaWJilbhm42GaGVsnmERzylrZCWu55OflDWjYFex0Umm4yfhphLdXjfxcy8+Os1qmSfiPFbc9d0bbDWU9nBjnAc3K4kEDuO4OxWI5fhvqJjK+fsYLUljF4jiHqC9cx+FknqZGSpZfIyK1XMbmSPDeYHzDu5j3bbglexkVmm48y29EaLs6HymdbpPibFYuZeo4XpfHLOahmrseYLkcc0j5WsZ2j2dW7ncgsLdita4E5TVmY4cUrOs4Z4sx207GPt1m1rE1dsrhBLNC3pHI6MNLmjuJ7h3LQEViLDo8Pf9f1h/XA/wAnVVxVQ4fMItark72S5fdp2PXarXYf8WkfoVvXh2n92fhHyhZERFyoIiICIiAiIgIiICIiAiIgLr5DH1srSmqXIWWK0zeV8bxuCP8A/eldhFYmYm8Cnu4f2mHlg1dnIIh9zHy1JOUermfA5x/pJJ/OvnkBf9s838zR/dlcUXTrOLyyjo1eVO8gL/tnm/maP7snkBf9s838zR/dlcV+ZHiONzyCQ0EkNBJ/QB1Kazicso6F5VDyAv8Atnm/maP7ss1uZqzrDK610foDiFen13p2CJ0jctj4DQimeSRHI9lZpJ2HXlPTmB67OAm6+Ty3hEaOxOU09lNT8M6MGZ7SbxrHsht5KrEdwGCTcxxyO5TuR1Ac0tIK1uKrDXlnkihjjkncHyvY0AyODQ0Fx9J5WtG59AA9Ca1icso6F5UbDcPM/FiqjMrrrJWsk2JoszU6dOGF8m3nFjHQvLW79wLifzru+QF/2zzfzNH92VxRNZxOWUdC8qd5AX/bPN/M0f3ZPIC/7Z5v5mj+7K4oms4nLKOheVO8gL/tnm/maP7suvkeHuakoWWUdcZWvddG4QS2KlOWNj9vNLmCBpcAdtwHDf1jvV5RNZxOWUdC8sAr5q/oGXRWnOJfEO1X1nqWaatWdhcdCKEsrXjkY1z67i0lr2bcxG7ubbZad5AX/bPN/M0f3ZWyerDZdE6aGOV0L+0jL2gljtiOYb9x2JG49ZWUZC9mOAGmdWajzeV1JxHxc2TFuvQrUY5beNryOHaNHLymSNhLndw5WtAA6EprOJyyjoXla/IC/wC2eb+Zo/uyeQF/2zzfzNH92VsqWW3KsNhjZGMlY17WyxujeARvs5rgC0+sEAj0rlTWcTllHQvKneQF/wBs838zR/dl+maBt77S6uzczD3sLKjN+vrbACP0FW9E1nE5ZR0Ly6mLxdXC0IaVKEQVohs1gJPedyST1JJJJJ3JJJJJK7aIuaZmZvLIiIoCIiAiIgIiICIiAiIgIiICIiAiKl8Y9Q6o0pw4y+W0ZhW6i1JW7F1XFvBIsAzMEjehBB7MvIO/QjfY9xDu8QNYWtJ6TzmRw2Gn1XmsdXbPHgqErRYnLjs0de4HZx32JIY7YOI2NdxXDp+rNX6S4i6idlsRqGhiex8m48kX0KliVp7Zxaw8sjwHFnNvykNadtwCJnR/DDTmktR5/U+OxQq6g1G+OfJ2nyvkke5rQAwFxPK0dTyt2G57u7a4ICIiAiIgIiICIiAiIgz/ACfDZmI4g5biNibGXu5yXDOpHA/CJZRuvZ50J5Heax4PM0O6NHaOJG5JMjw01rkdX6Ow2R1FgJ9HZ662QS4O9Mx0rHscWu5SPumnbmB2B5SCQFb1U9Y8LdM69zWm8vmsaLOU07b8dxltkr4pK8nTfZzSN2nYbtO4Ow3HRBbEVH4M6k1Xq3QVXJ61wTdN5+WxYbJjWgjso2zPbEepO5LA07+nfuCvCAiIgIiICIiAiIgIiICIiAiIgIiICIiAqRxqxmZzPDDOU9P6nh0ZmJWRivnLDg1lUiVhJJPraC3/AJld1mHhMeR3xHao+MDxzyQ7OHx/xDftuXt4+Tl26/d8n6N0GlVWubWhD3iV4YA54++O3euVcFHs/Eq/Y79j2beTfv5dun+C50BERAREQEREBERAREQEReEf4Ufgi/P6TxHEzHRF9rCAY7JbdSar3kxP/oZK9w//AHvzIPWXBDFZvC8OqNTUOq4da5Vs1gyZmu4OZK0zPLWgj8BpDP8AlV8X8dv4P7gvPxW4+4rKStkZhtKSR5izMzoDMx4NePf1ukaHbelsb1/YlAREQEREBERAREQEREBERAREQEREBERAVI41ZPM4bhhnLmn9MQ6zzETIzXwdhocy0TKwEEH1NJd/yq7qkcasZmczwwzlPT+p4dGZiVkYr5yw4NZVIlYSST62gt/5kFyquc6tCXsETywFzB96du5cq4qrXNrQh7xK8MAc8ffHbvXKgIiIK3mtV2K199DE48ZO3CAZ3Sz9hBDuAQ1z+VxLiDvytadhsTtu3eO8qNW+zmH+mpf3VdXTp5srqgnv+Fngn0naKID/AAAH6FOL6uhh4dqZoifDfu5TDWyEb5Uat9nMP9NS/uqeVGrfZzD/AE1L+6qSXTOZx7cu3Em9WGUdAbTaJmb25hDg0yBm/NyBxA5tttyAnsvLjOrqX5OHyo1b7OYf6al/dU8qNW+zmH+mpf3Vc2IzOP1Bjochi71bJUJwTFapzNlikAJB5XNJB6gjofQu4nsvLjOrqX5I3yo1b7OYf6al/dU8qNW+zmH+mpf3VSSJbC8uP7dS/JG+VGrfZzD/AE1L+6qL1Q/O6y05k8FltKYW3jMlWkqWYXZuXz43tLXD/Veh2Pf6FZkS2F5cf26l+TDfBi4LZjwZ9CWMDRxeIzF65bfauZN+TkhdMe6NvJ4u7ZrWgDbmPUuPTm2GweVGrfZzD/TUv7qpJEtheXH9upfkjfKjVvs5h/pqX91Tyo1b7OYf6al/dVJIlsLy4/t1L8kb5Uat9nMP9NS/uqeVGrfZzD/TUv7qpJEtheXH9upfkjfKjVvs5h/pqX91Tyo1b7OYf6al/dV2clkqeGx9i/kLUFGjWjMs9mzII4omAblznOIDQB3krnilZPEyWJ7ZI3tDmvYdw4HuIPpCey8uM6upfkj/ACo1b7OYf6al/dV+ma1y+P8A47NYOvVoN/0tihedaMQ/CcwxMPKPSRvsOuykF08y0Ow94OAcDBICCNwfNKsU4VU20IznqXjcZfi/orCaQy2qbGp8bJp/EyCG9fqTizHXkJYAx3Z8xDt5Gebtv5w9aiMtx0wVE6Dkx+OzmoqWszG7HXcNjnzwxRP7LaawTsYWATNcS4bgB3TzSF2+FOgNNaa4fY+ti8BjqFfIwQ3bkUFZjW2J3MYTJINvOduB1PqHqV6a0MaGtAa0DYADYAL5ldOjVNO5mVKxms9TX+JWd0/NomzS07QqiWrqeW7GYbsxEZ7JsI88bc793HpvGR6QqjW4z6j0HpzT9j
inpmPEZXO6hiwVZmn5hcrw9q0dlLM9xBa0uEgJAO2zfWtkUFro51ujc2/SzKsmpY6cz8Yy43eF1kMPZtd1bsC7Yb7jbdYE6ih9HTZmxpPDSairw1c+6nCchDXfzxMscg7QMPpbzb7fmUwgIiICIiAsw8JjyO+I7VHxgeOeSHZw+P8AiG/bcvbx8nLt1+75P0brT1SONWTzOG4YZy5p/TEOs8xEyM18HYaHMtEysBBB9TSXf8qC4Uez8Sr9jv2PZt5N+/l26f4LnXFVc51aEvYInlgLmD707dy5UBERBQNOf+6ao/raT9VEs0zGc1jxE4v6l0lp3U/kZi9MUqctm1BQhtWbliyJHtH8cHNbG1sfXZvMST1Gy0vTn/umqP62k/VRKsa04KY3Vuqm6lp5zPaUzrqwpWLun7bIXW4GklrJWvY9ruUuds7YOG52K+ti+OXyWfFSZ8pxC1xr3V2msNrSPTbdHUqML7TMXBMcpdmr9s6SUSBwjhA5Ryx7Hcu87oFC8INey8UOMmhdV2K7atnK8N5bE0Me/K2Tx+AP5d+vLzA7b+jZX/UXg8YjO2zag1JqfCW7GOhxeRsYzIhkmUgiaWs8Zc9ji54DnDtG8r/OPnKYg4Ladx2f0llsSbmEm01Rdi6sNCflimpkN/iJmuB52Asa4dQeYb7rwtKML4f6su4DwZOGGPw+oMnh87lJbEVWrhMVDkLt0Nkmc9kbJj2bA0bOdI/zQBt0Lguzj+L2vtQ8PtI1nZiTBail1/JpTIZB2PrmaSBjLB3dD58bJCGR78hIDmnYlp2Ok1/Bm0/jcbjamKz2osQ/FZCzexVqpcjMuPbYbtNWi543DsXd/K8OIJ6EdNqvrXwbJKOH0ziNKX86+u7W0WocjdkyMZtVAa0zJp45JBu4l5Y4g85Lnu2HLuBm1UQOhqDW/FLT8/EDRuKyUmq85h4cZkqeWjx8HjwpWJXtsN7FobDJMxsT3MHKA7fuJAB6eY43Z+TA6HwOktRZPV2X1DdyEdjL1sRUhyVRlRrXSQGrO6GFk4MjAecDZocQw7hajiuAOMw2GzletqbU7c3mrENi9qY5BvwnIYduyZziPkDGjcBgZy7OcCDuumfBl0v5PQUW5LOR5eHKy5uPUsd0NybbsjQySXtAzk85gDCzk5C0AcvRW0jvcD8try9UzlbW+PvQsq2WfBl/Jw1YLVuFzAXdrHWlkjDmPBG7SA4Fp5Qd1zcfMrrHDaIgs6MbZ8aGQgGQmx9Rlu5BR3PbSV4X+bJIPN2aQehdsCQF2YMHqjh7hYKWnGya5nmmkmtXNV550EzSQ0NDTHWkby9D5oawDbpvuV1reE1lxDpux+oIjoSOF7bEGS0nqJ09l0g3HI5slNjeQhxJB5gSB09K17rCoaS4n38trLhPQx2sPKrC5rH5uW9eNCOs+1JXfXEQfHygxPj7R7HNHLuQd29wFascUdc5bNQ4mlqMY51riRf04LPiMEpiox0nyNY1pbsXNc3cOO5325uZu7TosPg36eoYnA18Zl87icphrVu5Bna1pjrssto72TKZI3Mf2h2JBZt5rdttlyad8HTTumn42SDJZq1LR1FPqZsly0yV8tuWB0LxI4s3czZxO2/NzffbdFLVDIMxr/iZpfSPFDPya7ORHD/LitFWlxFVgycIZBM4WHNYCHcs/IDF2e3Lud99hNcRtccQI8lxwu4fWJw9HQsFe7j6DMZXmbPvj47Ekcr3tLiwuDtuXZwLz5xADRqOa4EYDO6a1/hLFzJMqa0tG5kHxyxiSJ5iij2hJYQ0bQt+6DupPXu27OW4MYTMxcRY5rV9o11A2vkuzkYOxa2qKwMO7DynkG/nc3nfm6JaRn2I19qfR+tKFTVOsIshhczpK3n5LVjHxQtxUsBhLyzswC6HlmJ5XlzvMHnHcqH4R8VtZz8TcVhsxkszm8Bn8LayNG9nMLVxry+F0RD4GQuLuyc2X7mZoePNO53K1rO8FdO6lu0Z8kbdmKrgrWnTVdI0RzVbAjEnPs3m59omgFpG256d20RprwesXpzUmCzsmp9TZnI4avLSqOyl2ORgrSM5DCWNiaNhsx3MAHksbzOcBslpGPxZniFqPwPMnr/Na6fZyVrTcl3xAYag+oQ0F2z2Phdzl7W7PB83zzytGwKl9QcT9fal13l9NaSiztKlpuhQEsunsbjbJmsWK4mHai3NGGxhpaA2Ju5If5w2AWvV+C2ErcFTwwbayBwBxbsT4yZGeNdk5paXc3Jy82x7+Xb8y6Oo+AeJzWoGZzHZ/UOlMu6nHj7dvA3GQOvQxgiMTB0bmlzdzs9oa4bkA7bKaMjL9QcSOJuIuaRu61yE/DTBTYqL4QvUsXBfrMyfbuY+O28l/YROZ2Za4EAF5Bk6L0jlzviLpHd2D/8AxKz7XPAbHcQa8FLJan1RHhxRix9vFV8kBXvxMJP8dzMc4udvs57XNc4bAnotAyrQzDXGtADRXeAB6ByleuHExVF1jxSmhf8AYjT39XV/1TVOKD0L/sRp7+rq/wCqamP11pvLahs4CjqHFXc7WjM0+Mr3YpLMTA4NL3RB3M1oc5oJI23IHpXDjfuVfGSfFOIi62TtvoY23airSXJIYnyNrw7c8pAJDG7+k7bD+leSMy4G0tLady3EPTundSXc7bq6glvZKtc5nDHTWWtkFeN5aA5gA/CcQdwSD0WrKicGH2cpoanqHK6Pq6J1Jnd72VxteMCTtj5odK7la5zyxrN+Ybju3O26vaAiIgIiICpHGrGZnM8MM5T0/qeHRmYlZGK+csODWVSJWEkk+toLf+ZXdZh4THkd8R2qPjA8c8kOzh8f8Q37bl7ePk5duv3fJ+jdBpVVrm1oQ94leGAOePvjt3rlXBR7PxKv2O/Y9m3k37+Xbp/gudAREQUK61+kcxlZbFazNjshY8ajsVa75+zcWMY5j2saXDq3mDtttiQSNhvweXeJ9WR+i7X2a0RF3R2imYjTpvPxt9JavHvZ35d4n1ZH6LtfZp5d4n1ZH6LtfZrREV1jC4Jz/BsZ35d4n1ZH6LtfZp5d4n1ZH6LtfZrRETWMLgnP8Gxnfl3ifVkfou19mnl3ifVkfou19mtERNYwuCc/wbGd+XeJ9WR+i7X2aeXeJ9WR+i7X2a0RE1jC4Jz/AAbGXYbivpjUVBl7FXp8nSeXNbZp0bEsbi0kOAc2Mg7EEH84Xd8u8T6sj9F2vs10fBpyul8zwixlvRunrWlsA6xaEOMub9pG8WJBI47ud908OcOvcVqSaxhcE5/g2M78u8T6sj9F2vs08u8T6sj9F2vs1oiJrGFwTn+DYzvy7xPqyP0Xa+zTy7xPqyP0Xa+zWiImsYXBOf4NjO/LvE+rI/Rdr7NPLvE+rI/Rdr7NaIiaxhcE5/g2M78u8T6sj9F2vs1+LGofKCpNQw9O9YuWGOiY6alNBDFuNud8j2BoA3326k7dAStHRNYojbFM3+P4gvD+XvhwaL496Bs3LFzVGVzHDAu7Co7FSmCvWgJ2jgswx7dWjZokcCHdPO5jyiG/g4Is5p3UnEbXuHwcu
qXYXCQY84WnKGW7L7NqNzTHzDl2aytK525B6NAB3O39VbtKvkac9S3BFaqzsMcsEzA9kjCNi1zT0II6EFZvwi8HXRnA3P6ryejqk+Mi1G6u+zju1560Doe02MII5mhxmeSC4gdA0NA2XDMzM3llz5PjdjdO+QMOcwmcxmQ1f2ccFUUXTeIzP7ICKy5m4iIdKBuenmv/AASoDiNq1nFLP5rhTpHVt/Ses8aauQvXYaMo5KrXwyOZFN0ZzObLGOhd0LgQRvtsyKAiIgIiICIiAqRxqyeZw3DDOXNP6Yh1nmImRmvg7DQ5lomVgIIPqaS7/lV3VG43Y/LZThbnq2C1TBonKvjjMOesvDI6m0rC5ziegBaHN/5kF0quc6tCXsETywFzB96du5cq62OmbYx9WVlhlpj4mubPG4ObICBs4EdCD37/AJ12UBERAREQEREBERAREQERfiaaOvE+WV7YomNLnvedmtA6kk+gIKdwgta2uaEpy8QqdKhqkyzieDHkGEMErhERs5w3MfIT17ye5XRZr4O+LoYfhVjauN1q/iFTbPZczPvl7QzkzvJbzczt+Qks7/vfQtKQEREBERAREQEREBERAREQEREBERAREQF0M9gcbqjD3MTl6NfJ4y5GYrFS1GJI5WHvDmnoV30QZhW0fqfh3mtB4DQNHB1OGlCGWpk6Fl0vjULduaOSJ+55jzAgh3UmQk797bboniJpriRjrF7TGbp5urWsSVJpKknN2crDs5rh3g9Nxv3ggjcEFWJUHX2gMzY09aj4d5ejobPWMizJWLjcbHNFdeNg9s7OhdzgNBeDzeaOqC/IqVheLeBzPE3NcP2Pts1NiKkV2eOanJHDLC8N/jInkcrmguDT179wN9jtA8LPCV0Jxm1rqrTOlMm7I29PFna2Whvi9tp3Dn13hxMjGuHKXbAEkFpc1wcQ1NERAREQEREBFlvGPwkdGcC85pPFaoszQ2dSXBUrui7MR1m8zWusWHve0RwtLxu7qdgdgdjtO664p0dC6k0ngpcXl8rkNSXDVrtxlJ0zIGt2Mk0zx0Yxgc0nrvsSQCASAkOInEXT3CnSV3Uup8gzGYioBzzOaXOc4nZrGtaCXOJ6AAKFZQ1VqnXDrc2Qw8/C63hxG3EyUXut25pfunSl+wawM2Abt1Ejg5u4BXJoXQGcwl/VVjVGq59YQ5bJeN0aVqrHHBjYWH+KijaB1I2YS7pu5ocACXF17QRemdMYnRmCp4XBY6vicTTZ2denUjDI42/mA9JO5J7ySSepUoiICIiAiIgIiICIiAiIgIiICIiAi62SutxuOtW3NLmwRPlLR6Q0E7f4LO8fpahqXHVMnnK7crkLULJpH2CXsYXNB5Y2k7MYN9gAB6zuSSenCwYxImqqbRn9YW29pqLOfi50x8hUfmQnxc6Y+QqPzIXvq+FxzlH3LsaMizn4udMfIVH5kJ8XOmPkKj8yE1fC45yj7jY0ZFnPxc6Y+QqPzIT4udMfIVH5kJq+FxzlH3GxlHh88R9b6F4VQ0NA4bLWMtnHyVrmZxlB8/wfTa3+M/jWHeKR5ewMcQfNEpBa5rSv5l+Dxxcv+D7xkweqeynbBWl7DI1Ni101V/SRux23O3nN36czWn0L+xfxc6Y+QqPzIXHNww0lYG0uncdKPU+u0pq+FxzlH3GxoGMyVXNY2pkKM7LVK3CyeCeI7tkjc0Oa4H0ggg/pXaWcM4baWjY1jMBQaxo2DWwgAD1L78XOmPkKj8yE1fC45yj7jY0ZFnPxc6Y+QqPzIT4udMfIVH5kJq+FxzlH3Gxoy4rVmGlWlsWJWQQQsMkksjg1rGgbkknuAHpWffFzpj5Co/MhPi50x8hUfmQmr4XHOUfcbH8hfCk4zW/CF425jPwCWfGNf4hh4GsJIqxuIZs3bfd5LpCPQXkepf0J/g4tVazucJruk9WaZyWIr6cfG3GZTIRTx+OwzGR5jAkGxMXKBuw7cskY5QRu/bIeF+ka42i05joh/uV2j/suX4udMfIVH5kJq+FxzlH3GxoyLOfi50x8hUfmQnxc6Y+QqPzITV8LjnKPuNjRkWc/Fzpj5Co/MhPi50x8hUfmQmr4XHOUfcbGjIs5+LnTHyFR+ZCfFzpj5Co/MhNXwuOco+42NGRZvNo3HYmtLZwtduIvxNL4Zqu7BzDrs5o6OadtiCD0/wCqvGncr8O6fxmS5QzxyrFY5R3DnYHbf4rxxcGKI0qZvHwt9ZS25IIiLlQREQEREBERAREQReqv9mMx/wDDm/8AAqvaZ/2cxX/xIv8AwCsOqv8AZjMf/Dm/8Cq9pn/ZzFf/ABIv/AL6OD+zPx+jXuUuHwhdA2tXV9M1s463mLFx1CGOvSsPiknZv2jGzCPs3Fmx5tnHl2PNtsuZ/HzQMeqvJ52oYhkvGxQ5uwm8W8Z327Dxjk7HtN+nJz82/TbfovOPD8TYbUGh9G6sGSwGmNNapnmwdi7p25DLesvknZWiltFpgG5ncd2OPaeb3EldvhdwupY7DY/h9rjTHEa9lq+RdHPPVyF84Ky3xgyx292zCBrfuXluwcHA+aSsRVMst9t+ENw+oZmxi59QCO3WvfBtk+J2DDWs8/II5pRHyREuIAL3AO9BK7es+OOiOH+XOLzmcFW8yITzRxVZrArRnfZ8zo2OELTsdjIWjYbrE9VaMz1jgNx8oRYLIy5DJanvWaFVlSQy2mF1cskiaBu8HlOzm7jzTt3Lg1Bo92l+KXESTU2n+IWao6htx38ba0bduivYjNdkTq07K8rGMe0sIDpdgWkecANk0pG5Z3jnonTucgw1rMumylinFkIKlClYuPmrSuc1krBDG/mbux25G+w2J2BBPwcdtDeWrdJuznZZx1k0mxS1J2RPsDfeJs7mCJz+h80O3/MqjoHQDNIcfJxjsNbp6do6GxuKo2JmPexgjs2CYBK7fmc1vZkjmJ25SfQsh1vR1hqHJCzm8RrvJ6jxWtK1/sKkE3wNWxkN5ro3wMYRHO7sQ09A+UOLtwACrNUwN40Hx6xeuOJOrtHspXatrCXvE4ZnUbPZ2A2Fr5HOkMQjj2c5zWtc7zg0ObuHBSOE496C1HqSHBY/UMU9+eV8FdxgmZXsyN35mQzuYIpXDY9GOceh9SoNChl8bxL4u6alxGYrHWL2WMTnq9KSSiwHHMhJkmaCI3NkiI2dsTu3bfdVDgnoPFSQaG03qXSHEatqHT7oHym/kL0mErWqrN2TRudN2Do3OZ5jYwducDlA3S8j1avP2nvCWl1VqTXlmtJXx+k9LCSFwtYLIvtzPbHGe1LmsDWtD5ADEGOk5Wl3mggr0CsS0Hp7KU8BxyjsY25BJkdRZGekySB7TajdRrta+MEee0ua4At3BII9C1NxM43wgtL43TemJNRZuvJm8tha+YbDhsfcmbYikbuZYIhG6Xk33OzhzNGxcApjNcc9EYHTWGz9nN9ticywyUJ6NSe2Z2gAkhkTHOAG433A29Oyy/gZpXNYjW3Dmxfw9+lDU4WUsdYlsVXxthtNlhLoHkgcsgAJLD5w2PRVDTFDV2m9
BcPcRk8drLG6TE2bdkq+mKs7Mh25vyOqMk7MCaKF0bnuDmbA+buQ0hY0pG25rjFFPl+Fz9M2KOWwOr8hNXfdAc49kypNMDGQ4crueIA8wO3nDYHu05eQ9CaY1HpTRPCy1Z0pqD/6Y1nlHX6Dq7prkVez42I5gAT2rB4xGXPYXDq47nYr14tUzfxFKocZdH5XW82kqeXNnOwzSV5IYqsxibKxhe+LtuTsudrQSWc2427lzVOLek72mdN6hgyvPh9RWYaeLs+LSjxiWUkRt5SzmbuWnq4ADbqQskxYy+n+PXi+isLqrH4jKZezLqenlseW4hw7N3/rqtg90j3tZ5jHEO5iS1pG6pOm6moKvDbgzoOXRupGZfTGp8f8K2XYyQVIYoZZAZWzbcsjCCHBzNwB90W+maUjb7nhOcNMfYfFZ1M2AR25aEk76VkQR2Y3Oa+F8vZ8jZN2O2YXAuGxaCHAmRh49aEl01l88/PCrjMPYhq5F9ypPXkqSSvYyPtYpGNkYHGRuzi3l2JO+wJGMN0ZnviegpHBZHxwcTvhA1/E5O08W+GjJ2/Ltv2fZ+fz93L132X3jJo3PZTN8aX0sFkbcWRZpLxR0FSR4smG6503Z7Dz+RuxdtvyjbfYKaUjUZPCh4axOuMfnrLLFNoksVnYi6J4ott+2MXY84i269rtydR53UKY1bx10PoerjLOWzfJXyVbx2rNUqT2mSQbA9qTCx4azYg8zth171WrWn8hJ4QGtMh8G2XY6zoupUitdg4wyzCxbLomu22c4BzSWg77OHTqFkWAx+rsfpDhzgc9i9cwabh0ZWijx+mYZoJ35QbtfDbezlfC1rOz5Q9zI9y7mPTZW8j0PqTjZorSYwXwjm2752s+3ixUrzWjdiaIyTEImOLztKwho6kHcAgHbsUeLuk8hpnP6gjyhjxWA7QZOSzVmgfVLImyuDo3sD9+R7XDZp336bnosL4M6PzlPIeD8clgMlUfgdN5ijedbpvaKc4NaNrXOI2bzBj+Q7+e3ct3C5uNWjcha464nTVCNr9P8SWV351u/VgxkjZZHf0TQujhP/CE0ptcelJZ2WcY+aMkxyQl7SWlp2Ldx0PUfpXb4c/ye6X/AKrq/qmrguf6nP8A/pu/7Ln4c/ye6X/qur+qatYv7P8AMfKWvcsSIi+cyIiICIiAiIgIiII3UsbptOZWNgLnuqStAHpJYVW9LvEmmcQ5p3a6nCQR6RyBXZVCfQU1eVww+bs4mo4lwpiGKWKMnv5OZu7Rv97vsPQAOi7cDEpimaKpt7/9ZqPCyi43weeH2J1NHnq+nx8IxWjdi7W5YlgisEl3asgfIYmP3JPM1oIPULRlH+RWc9rJvcIfqTyKzntZN7hD9S947qPCuMp6FuaQRR/kVnPayb3CH6k8is57WTe4Q/Ul8LzI9ehbmkEUf5FZz2sm9wh+pPIrOe1k3uEP1JfC8yPXoW5pBcVqrDerTVrETJ68zDHJFI3dr2kbEEekELqeRWc9rJvcIfqTyKzntZN7hD9SXwvMj16FuamDwduFrSCOHmmQR1BGKh/ZWhqP8is57WTe4Q/UnkVnPayb3CH6k9lH/cZT0S0b0gip/EPHah0ZoDU2oINTPsT4nGWb8cMlGINe6KJzw07DfYluy6PCIaj4jcLdJ6ptakdUs5nGV78kENGIsjdJGHFrSRvsN/Sl8LzI9ei25r8s9f4PHC+R7nv4e6Zc5x3LjioSSf7quXkVnPayb3CH6k8is57WTe4Q/Unsp/7jKeiWje7VOnBj6kFWrCyvWgY2KKGJoa1jGjYNAHcAABsuZR/kVnPayb3CH6k8is57WTe4Q/Ul8LzI9ei25pBFH+RWc9rJvcIfqTyKzntZN7hD9SXwvMj16FuaQRR/kVnPayb3CH6k8is57WTe4Q/Ul8LzI9ehbmkFXqHD/T+N1jktVwY1g1FkImwWMhI98j+zaGgMZzEiNvmNJawAEjc7nqpHyKzntZN7hD9SeRWc9rJvcIfqS+FxxlPRLRvdnIPbHQsvcQ1rYnEk+gbFdrh9E6HQWmo3gtezGVmuB9BETVHx6Bnt7xZjOWcpSd0kp9hFFHMPwZOVu5b62ggEEg7gkK4LxxsSnQ0KZvtv/rr7rCIi4WRERAREQEREBERAREQEREBERAREQEREBERBRuOv8iPEL+zuR/y0iiPBd/m4cMv7O0f1LVL8df5EeIX9ncj/AJaRRHgu/wA3Dhl/Z2j+pag1BERAREQEREBERAREQEREBERAREQEREBERAREQEREBERAREQEREBERAREQUbjr/IjxC/s7kf8tIojwXf5uHDL+ztH9S1S/HX+RHiF/Z3I/wCWkUR4Lv8ANw4Zf2do/qWoNQREQEREBERAREQEREBERAREQEREBERAREQEREBERAREQEREBERARdLMZqhgKEl3JW4aVSP7qWZ4aNz3AesnuAHUnuWd3uPNBspbjsLkL0YOwnl5K7XfnAcef/q0LrweyY/aP2qZn5ZrZqKLIfj7sey8vvzP2U+Pux7Ly+/M/ZXX+lds4PWOpZ4g/hQ+CcunuIGP4l0mPfj9QNZTvk9RFbijDY+voD4mDYD0xPPpUf8AwYHCO7qbixc13K6SHD6aicyLYkNntzRPiDdu4hsT5SfSC5nrXrXjfqCpxx4YZzRuU05LWiyEQ7G220x7q0zSHRygbDflcBuNxuNxuN11PB/yNTwf+F2L0djdPSXjWL5rV82GROtzvO7pC3Y7dOVoG52a1o3OyfpXbOD1jqWeo0WQ/H3Y9l5ffmfsp8fdj2Xl9+Z+yn6V2zg9Y6lmvIsto8eab5Wtv4PIU4ydjLC5k7W/nIBDtv6AVoeEz2P1Jj2XcZbiu1XHYSRO32PpaR3gj0g7ELkxuyY/Z9uLTMR6ZlnfREXIgiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiIC4rVqKlWmsTyNighYZJJHHYNaBuSf6AuVUfjVZfW4a5UMO3bvr1X/nZLPHG8fpa8j9K98DD77Fow+KYjOVjbLI9Saqta3yYyNnnjrN3NOo8bdgw+kj8Nw6k+jfYdB1jURfpuHh04VMUURaIYmbiIss4r62z1HVeD0vp2O82zdrT3rFjGwV5rDY43MaGsbYe2PqX9SdyABsOpImJiRhU6Uo1NFhp1XxDZW0zjcjNLgruQ1BJj23LVSs6axT8Vkka90bHvYyQOaR5p23YCQQS0/bfEfVGChzmmvhGLI52PUVPB0cxarMaGMswslEkkbA1rnMaXjoACeXp378+t0+MxMdbXsraGZKnJkJKDbUDr0cbZn1RIDK1jiQ15bvuGktcAe47H1LsrIdBYrKYfjlqSvls3Jn7PwBRc23LWjgdy9vP5pbGA07Hc77DoQPRudeXvhVziUzMxbbKC7uB1Hd0dlRlKHPJsNrNNp821GPvSO7nH3ru8Hp3FwPSRbropxKZori8SsTZ6ex9+vlaFa7VkE1axG2WKQffNcNwf+hXYVC4IWXTcP68Tvua1qzAz/hEz+UfoBA/Qr6vzLtGF3ONXh7pmG58RERc6CI
iAiIgIiICIiAiIgIiICIiAiIgIiICIiAoHXennaq0hlMXGWieeHeAu7hK0h0ZP5g9rVPIt0Vzh1xXT4xtHlaCUzRNc6N8T+50cg2cxw6FpHoIO4P9Cr2byWrK2QfHicBi8hSAHLPay767yduoLBXeB1/3v+i9AcQ+FkmXsy5bBdlHkJPOsVZXFsdg7AcwOx5X7D1bO6b7fdLJb1a9iZTFkcVkKEgOxE1Z5b+h7QWO/Q4r9E7P2zC7ZRE0VWn3xsv6+7mW3KWc1r3ptpPB/n31BL+6LrZXQ0/ECGhez0T9MZ/GzPNK7gMkZZYmOaA4c74Wgh3cWFhHmg7+q4fCtb8J/wA076k+Fa34T/mnfUuucLSi1czMfx0TRncrrOG1MxaeFnJ5TITYS6+/DYuWBJLLI5kjCJCW9W7SO2DeXbYbdBsurmuEGDz3lEbUl3tM1ar3nyxTBj6s8EbGRSQOA3YQGA7nfrv6DsrZ8K1vwn/NO+pPhWt+E/5p31Kzg0TFpj/Wt8jRncpFDh7d0TkredxFq7qvOXIIaU3lBkmwt7FjnuBDo4HbHd+23Lse/od95EZnXmzt9KYMHbptqCXqfdP6VZvhWt+E/wCad9SfCtb8J/zTvqUjC0dlEzEfx9YNGdyEw+U1fYyMMeT09iaNE79pYrZmSeRnQ7bMNZgO52H3Q23367bKx2Jm14HyuDnBg35WDdx/MB6SfQFyUYbeWlbHj8bfvSE7bQVXlo/pcQGgfnJC1Th9woloXIMvqBsZtwnnrUI3c7IXeh73dznj0Aea09fOPKW8vaO14XY6JnEqvO7Zf0+a6O9beHWnpdL6NxtCwALYa6awBt0lkcXvHT1FxH6FZERfneJXOLXNdXjM3PEREXmCIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiD/2Q==", + "text/plain": [ + "" + ] + }, + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" } ], "source": [ - "chain = load_summarize_chain(llm, chain_type=\"refine\")\n", - "result = chain.invoke(split_docs)\n", + "from IPython.display import Image\n", "\n", - "print(result[\"output_text\"])" + "Image(app.get_graph().draw_mermaid_png())" ] }, { "cell_type": "markdown", - "id": "b5dc3052-5873-4ef2-b633-3709ede4131a", + "id": "678c0200-32df-4faf-bc54-a4dd470f199c", "metadata": {}, "source": [ - "Following the [Langsmith trace](https://smith.langchain.com/public/38017fa7-b190-4635-992c-e8554227a4bb/r), we can see the summaries iteratively updated with new information." - ] - }, - { - "cell_type": "markdown", - "id": "5b46f44d", - "metadata": {}, - "source": [ - "It's also possible to supply a prompt and return intermediate steps." - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "id": "f86c8072", - "metadata": {}, - "outputs": [], - "source": [ - "prompt_template = \"\"\"Write a concise summary of the following:\n", - "{text}\n", - "CONCISE SUMMARY:\"\"\"\n", - "prompt = PromptTemplate.from_template(prompt_template)\n", - "\n", - "refine_template = (\n", - " \"Your job is to produce a final summary\\n\"\n", - " \"We have provided an existing summary up to a certain point: {existing_answer}\\n\"\n", - " \"We have the opportunity to refine the existing summary\"\n", - " \"(only if needed) with some more context below.\\n\"\n", - " \"------------\\n\"\n", - " \"{text}\\n\"\n", - " \"------------\\n\"\n", - " \"Given the new context, refine the original summary in Italian\"\n", - " \"If the context isn't useful, return the original summary.\"\n", - ")\n", - "refine_prompt = PromptTemplate.from_template(refine_template)\n", - "chain = load_summarize_chain(\n", - " llm=llm,\n", - " chain_type=\"refine\",\n", - " question_prompt=prompt,\n", - " refine_prompt=refine_prompt,\n", - " return_intermediate_steps=True,\n", - " input_key=\"input_documents\",\n", - " output_key=\"output_text\",\n", - ")\n", - "result = chain.invoke({\"input_documents\": split_docs}, return_only_outputs=True)" + "When running the application, we can stream the graph to observe its sequence of steps. Below, we will simply print out the name of the step.\n", + "\n", + "Note that because we have a loop in the graph, it can be helpful to specify a [recursion_limit](https://langchain-ai.github.io/langgraph/reference/errors/#langgraph.errors.GraphRecursionError) on its execution. 
This will raise a specific error when the specified limit is exceeded." ] }, { "cell_type": "code", - "execution_count": 15, - "id": "d9600b67-79d4-4f85-aba2-9fe81fa29f49", + "execution_count": 17, + "id": "b5e32a3c-f43e-4e18-a32d-466403afa844", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "Il presente articolo discute il concetto di costruire agenti autonomi utilizzando LLM (large language model) come controller principale. Esplora i diversi componenti di un sistema di agenti alimentato da LLM, tra cui la pianificazione, la memoria e l'uso degli strumenti. Dimostrazioni di concetto come AutoGPT mostrano il potenziale di LLM come risolutore generale di problemi. Approcci come Chain of Thought, Tree of Thoughts, LLM+P, ReAct e Reflexion consentono agli agenti autonomi di pianificare, riflettere su se stessi e migliorarsi iterativamente. Tuttavia, ci sono sfide da affrontare, come la limitata capacità di contesto che limita l'inclusione di informazioni storiche dettagliate e la difficoltà di pianificazione a lungo termine e decomposizione delle attività. Inoltre, l'affidabilità dell'interfaccia di linguaggio naturale tra LLM e componenti esterni come la memoria e gli strumenti è incerta, poiché i LLM possono commettere errori di formattazione e mostrare comportamenti ribelli. Nonostante ciò, il sistema AutoGPT viene menzionato come esempio di dimostrazione di concetto che utilizza LLM come controller principale per agenti autonomi. Questo articolo fa riferimento a diverse fonti che esplorano approcci e applicazioni specifiche di LLM nell'ambito degli agenti autonomi.\n" + "['generate_summary']\n", + "['generate_summary']\n", + "['generate_summary']\n", + "['generate_summary']\n", + "['generate_summary']\n", + "['generate_summary']\n", + "['generate_summary']\n", + "['generate_summary']\n", + "['generate_summary']\n", + "['generate_summary']\n", + "['generate_summary']\n", + "['generate_summary']\n", + "['generate_summary']\n", + "['generate_summary']\n", + "['collect_summaries']\n", + "['collapse_summaries']\n", + "['collapse_summaries']\n", + "['generate_final_summary']\n" ] } ], "source": [ - "print(result[\"output_text\"])" + "async for step in app.astream(\n", + " {\"contents\": [doc.page_content for doc in split_docs]},\n", + " {\"recursion_limit\": 10},\n", + "):\n", + " print(list(step.keys()))" ] }, { "cell_type": "code", - "execution_count": 16, - "id": "5f91a8eb-daa5-4191-ace4-01765801db3e", + "execution_count": 31, + "id": "b0b28b30-d12b-4a30-a0e2-f897adab68c9", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "This article discusses the concept of building autonomous agents using LLM (large language model) as the core controller. The article explores the different components of an LLM-powered agent system, including planning, memory, and tool use. It also provides examples of proof-of-concept demos and highlights the potential of LLM as a general problem solver.\n", - "\n", - "Questo articolo discute del concetto di costruire agenti autonomi utilizzando LLM (large language model) come controller principale. L'articolo esplora i diversi componenti di un sistema di agenti alimentato da LLM, inclusa la pianificazione, la memoria e l'uso degli strumenti. Vengono forniti anche esempi di dimostrazioni di proof-of-concept e si evidenzia il potenziale di LLM come risolutore generale di problemi. 
Inoltre, vengono presentati approcci come Chain of Thought, Tree of Thoughts, LLM+P, ReAct e Reflexion che consentono agli agenti autonomi di pianificare, riflettere su se stessi e migliorare iterativamente.\n", - "\n", - "Questo articolo discute del concetto di costruire agenti autonomi utilizzando LLM (large language model) come controller principale. L'articolo esplora i diversi componenti di un sistema di agenti alimentato da LLM, inclusa la pianificazione, la memoria e l'uso degli strumenti. Vengono forniti anche esempi di dimostrazioni di proof-of-concept e si evidenzia il potenziale di LLM come risolutore generale di problemi. Inoltre, vengono presentati approcci come Chain of Thought, Tree of Thoughts, LLM+P, ReAct e Reflexion che consentono agli agenti autonomi di pianificare, riflettere su se stessi e migliorare iterativamente. Il nuovo contesto riguarda l'approccio Chain of Hindsight (CoH) che permette al modello di migliorare autonomamente i propri output attraverso un processo di apprendimento supervisionato. Viene anche presentato l'approccio Algorithm Distillation (AD) che applica lo stesso concetto alle traiettorie di apprendimento per compiti di reinforcement learning.\n" + "{'generate_final_summary': {'final_summary': 'The consolidated summary of the main themes from the provided documents is as follows:\\n\\n1. **Integration of Large Language Models (LLMs) in Autonomous Agents**: The documents explore the evolving role of LLMs in autonomous systems, emphasizing their enhanced reasoning and acting capabilities through methodologies that incorporate structured planning, memory systems, and tool use.\\n\\n2. **Core Components of Autonomous Agents**:\\n - **Planning**: Techniques like task decomposition (e.g., Chain of Thought) and external classical planners are utilized to facilitate long-term planning by breaking down complex tasks.\\n - **Memory**: The memory system is divided into short-term (in-context learning) and long-term memory, with parallels drawn between human memory and machine learning to improve agent performance.\\n - **Tool Use**: Agents utilize external APIs and algorithms to enhance problem-solving abilities, exemplified by frameworks like HuggingGPT that manage task workflows.\\n\\n3. **Neuro-Symbolic Architectures**: The integration of MRKL (Modular Reasoning, Knowledge, and Language) systems combines neural and symbolic expert modules with LLMs, addressing challenges in tasks such as verbal math problem-solving.\\n\\n4. **Specialized Applications**: Case studies, such as ChemCrow and projects in anticancer drug discovery, demonstrate the advantages of LLMs augmented with expert tools in specialized domains.\\n\\n5. **Challenges and Limitations**: The documents highlight challenges such as hallucination in model outputs and the finite context length of LLMs, which affects their ability to incorporate historical information and perform self-reflection. Techniques like Chain of Hindsight and Algorithm Distillation are discussed to enhance model performance through iterative learning.\\n\\n6. 
**Structured Software Development**: A systematic approach to creating Python software projects is emphasized, focusing on defining core components, managing dependencies, and adhering to best practices for documentation.\\n\\nOverall, the integration of structured planning, memory systems, and advanced tool use aims to enhance the capabilities of LLM-powered autonomous agents while addressing the challenges and limitations these technologies face in real-world applications.'}}\n" ] } ], "source": [ - "print(\"\\n\\n\".join(result[\"intermediate_steps\"][:3]))" + "print(step)" ] }, { "cell_type": "markdown", - "id": "0d8a8398-a43c-4f14-933c-c0743ae6ec40", + "id": "a9e33d11-7a2a-4693-8c87-88b88eebc896", "metadata": {}, "source": [ - "## Splitting and summarizing in a single chain\n", - "For convenience, we can wrap both the text splitting of our long document and summarizing in a single [chain](/docs/how_to/sequence):" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "0ddd522e-30dc-4f6a-b993-c4f97e656c4f", - "metadata": {}, - "outputs": [], - "source": [ - "def split_text(text: str):\n", - " return text_splitter.create_documents([text])\n", + "In the corresponding [LangSmith trace](https://smith.langchain.com/public/9d7b1d50-e1d6-44c9-9ab2-eabef621c883/r) we can see the individual LLM calls, grouped under their respective nodes.\n", + "\n", + "### Go deeper\n", + " \n", + "**Customization** \n", + "\n", + "* As shown above, you can customize the LLMs and prompts for map and reduce stages.\n", + "\n", + "**Real-world use-case**\n", "\n", + "* See [this blog post](https://blog.langchain.dev/llms-to-improve-documentation/) case-study on analyzing user interactions (questions about LangChain documentation)! \n", + "* The blog post and associated [repo](https://github.com/mendableai/QA_clustering) also introduce clustering as a means of summarization.\n", + "* This opens up another path beyond the `stuff` or `map-reduce` approaches that is worth considering.\n", "\n", - "summarize_document_chain = split_text | chain" + "![Image description](../../static/img/summarization_use_case_3.png)" ] }, { "cell_type": "markdown", - "id": "a41e4a81-3e26-4753-95bd-f80633620121", + "id": "e8680f94-c872-4d36-92e5-1462ffeb577d", "metadata": {}, "source": [ "## Next steps\n", "\n", "We encourage you to check out the [how-to guides](/docs/how_to) for more detail on: \n", "\n", + "- Other summarization strategies, such as [iterative refinement](/docs/how_to/summarize_refine)\n", "- Built-in [document loaders](/docs/how_to/#document-loaders) and [text-splitters](/docs/how_to/#text-splitters)\n", "- Integrating various combine-document chains into a [RAG application](/docs/tutorials/rag/)\n", "- Incorporating retrieval into a [chatbot](/docs/how_to/chatbots_retrieval/)\n", "\n", "and other concepts." 
] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "db01bcf3-0186-4689-8f79-1a577e551cb1", - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { From 0f7b8adddf379ed2eca54b0f24b6a80996e8e0fe Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Thin=20red=20line=20=E6=9C=AA=E6=9D=A5=E4=BA=A7=E5=93=81?= =?UTF-8?q?=E7=BB=8F=E7=90=86?= <66343787+jiru1997@users.noreply.github.com> Date: Mon, 19 Aug 2024 06:33:19 -0700 Subject: [PATCH 12/80] fix issue: cannot use document_variable_name to override context in create_stuff_documents_chain (#25531) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit …he prompt in the create_stuff_documents_chain Thank you for contributing to LangChain! - [ ] **PR title**: "langchain:add document_variable_name in the function _validate_prompt in create_stuff_documents_chain" - [ ] **PR message**: - **Description:** add document_variable_name in the function _validate_prompt in create_stuff_documents_chain - **Issue:** according to the description of create_stuff_documents_chain function, the parameter document_variable_name can be used to override the "context" in the prompt, but in the function, _validate_prompt it still use DOCUMENTS_KEY to check if it is a valid prompt, the value of DOCUMENTS_KEY is always "context", so even through the user use document_variable_name to override it, the code still tries to check if "context" is in the prompt, and finally it reports error. so I use document_variable_name to replace DOCUMENTS_KEY, the default value of document_variable_name is "context" which is same as DOCUMENTS_KEY, but it can be override by users. - **Dependencies:** none - **Twitter handle:** https://x.com/xjr199703 - [ ] **Add tests and docs**: none - [ ] **Lint and test**: Run `make format`, `make lint` and `make test` from the root of the package(s) you've modified. See contribution guidelines for more: https://python.langchain.com/docs/contributing/ Additional guidelines: - Make sure optional dependencies are imported within a function. - Please do not add dependencies to pyproject.toml files (even optional ones) unless they are required for unit tests. - Most PRs should not touch more than one package. - Changes should be backwards compatible. - If you are adding something to community, do not re-import it in langchain. If no one reviews your PR within a few days, please @-mention one of baskaryan, efriis, eyurtsev, ccurme, vbarda, hwchase17. --------- Co-authored-by: Chester Curme --- libs/langchain/langchain/chains/combine_documents/base.py | 8 ++++---- .../langchain/langchain/chains/combine_documents/stuff.py | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/libs/langchain/langchain/chains/combine_documents/base.py b/libs/langchain/langchain/chains/combine_documents/base.py index 90e965996de45..00b6002da0ed4 100644 --- a/libs/langchain/langchain/chains/combine_documents/base.py +++ b/libs/langchain/langchain/chains/combine_documents/base.py @@ -22,11 +22,11 @@ DEFAULT_DOCUMENT_PROMPT = PromptTemplate.from_template("{page_content}") -def _validate_prompt(prompt: BasePromptTemplate) -> None: - if DOCUMENTS_KEY not in prompt.input_variables: +def _validate_prompt(prompt: BasePromptTemplate, document_variable_name: str) -> None: + if document_variable_name not in prompt.input_variables: raise ValueError( - f"Prompt must accept {DOCUMENTS_KEY} as an input variable. 
Received prompt " - f"with input variables: {prompt.input_variables}" + f"Prompt must accept {document_variable_name} as an input variable. " + f"Received prompt with input variables: {prompt.input_variables}" ) diff --git a/libs/langchain/langchain/chains/combine_documents/stuff.py b/libs/langchain/langchain/chains/combine_documents/stuff.py index 5ffd86c971865..cdecec0f40b82 100644 --- a/libs/langchain/langchain/chains/combine_documents/stuff.py +++ b/libs/langchain/langchain/chains/combine_documents/stuff.py @@ -76,7 +76,7 @@ def create_stuff_documents_chain( chain.invoke({"context": docs}) """ # noqa: E501 - _validate_prompt(prompt) + _validate_prompt(prompt, document_variable_name) _document_prompt = document_prompt or DEFAULT_DOCUMENT_PROMPT _output_parser = output_parser or StrOutputParser() From 75c3c81b8c3fe534bcdc1ebbc2147b41079156e0 Mon Sep 17 00:00:00 2001 From: Mohammad Mohtashim <45242107+keenborder786@users.noreply.github.com> Date: Mon, 19 Aug 2024 18:36:42 +0500 Subject: [PATCH 13/80] [Community]: Fix - Open AI Whisper `client.audio.transcriptions` returning Text Object which raises error (#25271) - **Description:** The following [line](https://github.com/langchain-ai/langchain/blob/fd546196ef0fafa4a4cd7bb7ebb1771ef599f372/libs/community/langchain_community/document_loaders/parsers/audio.py#L117) in `OpenAIWhisperParser` returns a text object for some odd reason despite the official documentation saying it should return `Transcript` Instance which should have the text attribute. But for the example given in the issue and even when I tried running on my own, I was directly getting the text. The small PR accounts for that. - **Issue:** : #25218 I was able to replicate the error even without the GenericLoader as shown below and the issue was with `OpenAIWhisperParser` ```python parser = OpenAIWhisperParser(api_key="sk-fxxxxxxxxx", response_format="srt", temperature=0) list(parser.lazy_parse(Blob.from_path('path_to_file.m4a'))) ``` --- .../langchain_community/document_loaders/parsers/audio.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/libs/community/langchain_community/document_loaders/parsers/audio.py b/libs/community/langchain_community/document_loaders/parsers/audio.py index c8fa4c3ed3951..9741a32f3de17 100644 --- a/libs/community/langchain_community/document_loaders/parsers/audio.py +++ b/libs/community/langchain_community/document_loaders/parsers/audio.py @@ -129,7 +129,9 @@ def lazy_parse(self, blob: Blob) -> Iterator[Document]: continue yield Document( - page_content=transcript.text, + page_content=transcript.text + if not isinstance(transcript, str) + else transcript, metadata={"source": blob.source, "chunk": split_number}, ) From 5a3aaae6dc332a518b06e9d77b06e35047c10af2 Mon Sep 17 00:00:00 2001 From: ccurme Date: Mon, 19 Aug 2024 09:58:06 -0400 Subject: [PATCH 14/80] groq[patch]: update model used for llama tests (#25542) `llama-3.1-8b-instant` often fails some of the tool calling standard tests. Here we update to `llama-3.1-70b-versatile`. 
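For context (not part of the diff below), a minimal sketch of the kind of tool-calling
invocation these standard tests exercise; the model name and prompt mirror the test
parameters, the `magic_function` tool is a stand-in defined here purely for illustration,
and a valid `GROQ_API_KEY` is assumed in the environment:

```python
from langchain_core.tools import tool
from langchain_groq import ChatGroq


@tool
def magic_function(input: int) -> int:
    """Applies a magic function to an input."""
    return input + 2


llm = ChatGroq(model="llama-3.1-70b-versatile", temperature=0)
msg = llm.bind_tools([magic_function]).invoke(
    "What is the value of magic_function(3)? Use the tool."
)
# The smaller 8b-instant model often answers in plain text here instead of
# emitting a tool call, which is what makes the standard tests flaky.
print(msg.tool_calls)
```
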
--- libs/partners/groq/tests/integration_tests/test_standard.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libs/partners/groq/tests/integration_tests/test_standard.py b/libs/partners/groq/tests/integration_tests/test_standard.py index e701c726f1d7b..6feab74f60677 100644 --- a/libs/partners/groq/tests/integration_tests/test_standard.py +++ b/libs/partners/groq/tests/integration_tests/test_standard.py @@ -28,7 +28,7 @@ class TestGroqLlama(BaseTestGroq): @property def chat_model_params(self) -> dict: return { - "model": "llama-3.1-8b-instant", + "model": "llama-3.1-70b-versatile", "temperature": 0, "rate_limiter": rate_limiter, } From 015ab91b83ed47d61805b239141fc1f711a5bd30 Mon Sep 17 00:00:00 2001 From: maang-h <55082429+maang-h@users.noreply.github.com> Date: Mon, 19 Aug 2024 23:26:38 +0800 Subject: [PATCH 15/80] community[patch]: Add ToolMessage for ChatZhipuAI (#25547) - **Description:** Add ToolMessage for `ChatZhipuAI` to solve the issue #25490 --- .../langchain_community/chat_models/zhipuai.py | 17 +++++++++++++++++ .../unit_tests/chat_models/test_zhipuai.py | 18 +++++++++++++++++- 2 files changed, 34 insertions(+), 1 deletion(-) diff --git a/libs/community/langchain_community/chat_models/zhipuai.py b/libs/community/langchain_community/chat_models/zhipuai.py index 03d858398766f..349a8b59ca4d8 100644 --- a/libs/community/langchain_community/chat_models/zhipuai.py +++ b/libs/community/langchain_community/chat_models/zhipuai.py @@ -42,6 +42,7 @@ HumanMessageChunk, SystemMessage, SystemMessageChunk, + ToolMessage, ) from langchain_core.output_parsers.base import OutputParserLike from langchain_core.output_parsers.openai_tools import ( @@ -150,6 +151,15 @@ def _convert_dict_to_message(dct: Dict[str, Any]) -> BaseMessage: if tool_calls is not None: additional_kwargs["tool_calls"] = tool_calls return AIMessage(content=content, additional_kwargs=additional_kwargs) + if role == "tool": + additional_kwargs = {} + if "name" in dct: + additional_kwargs["name"] = dct["name"] + return ToolMessage( + content=content, + tool_call_id=dct.get("tool_call_id"), # type: ignore[arg-type] + additional_kwargs=additional_kwargs, + ) return ChatMessage(role=role, content=content) # type: ignore[arg-type] @@ -171,6 +181,13 @@ def _convert_message_to_dict(message: BaseMessage) -> Dict[str, Any]: message_dict = {"role": "user", "content": message.content} elif isinstance(message, AIMessage): message_dict = {"role": "assistant", "content": message.content} + elif isinstance(message, ToolMessage): + message_dict = { + "role": "tool", + "content": message.content, + "tool_call_id": message.tool_call_id, + "name": message.name or message.additional_kwargs.get("name"), + } else: raise TypeError(f"Got unknown type '{message.__class__.__name__}'.") return message_dict diff --git a/libs/community/tests/unit_tests/chat_models/test_zhipuai.py b/libs/community/tests/unit_tests/chat_models/test_zhipuai.py index 5295b6f340b91..41d3e468f5100 100644 --- a/libs/community/tests/unit_tests/chat_models/test_zhipuai.py +++ b/libs/community/tests/unit_tests/chat_models/test_zhipuai.py @@ -1,8 +1,12 @@ """Test ZhipuAI Chat API wrapper""" import pytest +from langchain_core.messages import ToolMessage -from langchain_community.chat_models.zhipuai import ChatZhipuAI +from langchain_community.chat_models.zhipuai import ( + ChatZhipuAI, + _convert_message_to_dict, +) @pytest.mark.requires("httpx", "httpx_sse", "jwt") @@ -11,3 +15,15 @@ def test_zhipuai_model_param() -> None: assert llm.model_name == "foo" llm = 
ChatZhipuAI(api_key="test", model_name="foo") # type: ignore[call-arg] assert llm.model_name == "foo" + + +def test__convert_message_to_dict_with_tool() -> None: + message = ToolMessage(name="foo", content="bar", tool_call_id="abc123") + result = _convert_message_to_dict(message) + expected_output = { + "name": "foo", + "content": "bar", + "tool_call_id": "abc123", + "role": "tool", + } + assert result == expected_output From c5bf114c0f5e9518c418b68f524a9d1b2daa5297 Mon Sep 17 00:00:00 2001 From: ccurme Date: Mon, 19 Aug 2024 16:37:36 -0400 Subject: [PATCH 16/80] together, standard-tests: specify tool_choice in standard tests (#25548) Here we allow standard tests to specify a value for `tool_choice` via a `tool_choice_value` property, which defaults to None. Chat models [available in Together](https://docs.together.ai/docs/chat-models) have issues passing standard tool calling tests: - llama 3.1 models currently [appear to rely on user-side parsing](https://docs.together.ai/docs/llama-3-function-calling) in Together; - Mixtral-8x7B and Mistral-7B (currently tested) consistently do not call tools in some tests. Specifying tool_choice also lets us remove an existing `xfail` and use a smaller model in Groq tests. --- .../integration_tests/test_chat_models.py | 37 +++++++++++++++++++ .../tests/integration_tests/test_standard.py | 15 +++++++- .../tests/integration_tests/test_standard.py | 7 +++- .../test_chat_models_standard.py | 9 +++-- .../integration_tests/chat_models.py | 20 ++++++++-- .../unit_tests/chat_models.py | 5 +++ 6 files changed, 83 insertions(+), 10 deletions(-) diff --git a/libs/partners/groq/tests/integration_tests/test_chat_models.py b/libs/partners/groq/tests/integration_tests/test_chat_models.py index be8814bc3e7ad..2e5a9620b2287 100644 --- a/libs/partners/groq/tests/integration_tests/test_chat_models.py +++ b/libs/partners/groq/tests/integration_tests/test_chat_models.py @@ -14,6 +14,7 @@ ) from langchain_core.outputs import ChatGeneration, LLMResult from langchain_core.pydantic_v1 import BaseModel, Field +from langchain_core.tools import tool from langchain_groq import ChatGroq from tests.unit_tests.fake.callbacks import ( @@ -393,6 +394,42 @@ class Joke(BaseModel): assert len(result.punchline) != 0 +def test_tool_calling_no_arguments() -> None: + # Note: this is a variant of a test in langchain_standard_tests + # that as of 2024-08-19 fails with "Failed to call a function. Please + # adjust your prompt." when `tool_choice="any"` is specified, but + # passes when `tool_choice` is not specified. + model = ChatGroq(model="llama-3.1-70b-versatile", temperature=0) # type: ignore[call-arg] + + @tool + def magic_function_no_args() -> int: + """Calculates a magic function.""" + return 5 + + model_with_tools = model.bind_tools([magic_function_no_args]) + query = "What is the value of magic_function()? Use the tool." 
+ result = model_with_tools.invoke(query) + assert isinstance(result, AIMessage) + assert len(result.tool_calls) == 1 + tool_call = result.tool_calls[0] + assert tool_call["name"] == "magic_function_no_args" + assert tool_call["args"] == {} + assert tool_call["id"] is not None + assert tool_call["type"] == "tool_call" + + # Test streaming + full: Optional[BaseMessageChunk] = None + for chunk in model_with_tools.stream(query): + full = chunk if full is None else full + chunk # type: ignore + assert isinstance(full, AIMessage) + assert len(full.tool_calls) == 1 + tool_call = full.tool_calls[0] + assert tool_call["name"] == "magic_function_no_args" + assert tool_call["args"] == {} + assert tool_call["id"] is not None + assert tool_call["type"] == "tool_call" + + # Groq does not currently support N > 1 # @pytest.mark.scheduled # def test_chat_multiple_completions() -> None: diff --git a/libs/partners/groq/tests/integration_tests/test_standard.py b/libs/partners/groq/tests/integration_tests/test_standard.py index 6feab74f60677..38fe554c5c779 100644 --- a/libs/partners/groq/tests/integration_tests/test_standard.py +++ b/libs/partners/groq/tests/integration_tests/test_standard.py @@ -1,6 +1,6 @@ """Standard LangChain interface tests""" -from typing import Type +from typing import Optional, Type import pytest from langchain_core.language_models import BaseChatModel @@ -28,11 +28,22 @@ class TestGroqLlama(BaseTestGroq): @property def chat_model_params(self) -> dict: return { - "model": "llama-3.1-70b-versatile", + "model": "llama-3.1-8b-instant", "temperature": 0, "rate_limiter": rate_limiter, } + @property + def tool_choice_value(self) -> Optional[str]: + """Value to use for tool choice when used in tests.""" + return "any" + + @pytest.mark.xfail( + reason=("Fails with 'Failed to call a function. Please adjust your prompt.'") + ) + def test_tool_calling_with_no_arguments(self, model: BaseChatModel) -> None: + super().test_tool_calling_with_no_arguments(model) + @pytest.mark.xfail( reason=("Fails with 'Failed to call a function. 
Please adjust your prompt.'") ) diff --git a/libs/partners/mistralai/tests/integration_tests/test_standard.py b/libs/partners/mistralai/tests/integration_tests/test_standard.py index 965cd03c4b178..cea6399ee4cd8 100644 --- a/libs/partners/mistralai/tests/integration_tests/test_standard.py +++ b/libs/partners/mistralai/tests/integration_tests/test_standard.py @@ -1,6 +1,6 @@ """Standard LangChain interface tests""" -from typing import Type +from typing import Optional, Type from langchain_core.language_models import BaseChatModel from langchain_standard_tests.integration_tests import ( # type: ignore[import-not-found] @@ -18,3 +18,8 @@ def chat_model_class(self) -> Type[BaseChatModel]: @property def chat_model_params(self) -> dict: return {"model": "mistral-large-latest", "temperature": 0} + + @property + def tool_choice_value(self) -> Optional[str]: + """Value to use for tool choice when used in tests.""" + return "any" diff --git a/libs/partners/together/tests/integration_tests/test_chat_models_standard.py b/libs/partners/together/tests/integration_tests/test_chat_models_standard.py index 2250873f4b659..18c167f8a91dc 100644 --- a/libs/partners/together/tests/integration_tests/test_chat_models_standard.py +++ b/libs/partners/together/tests/integration_tests/test_chat_models_standard.py @@ -1,6 +1,6 @@ """Standard LangChain interface tests""" -from typing import Type +from typing import Optional, Type import pytest from langchain_core.language_models import BaseChatModel @@ -28,9 +28,10 @@ def chat_model_params(self) -> dict: "rate_limiter": rate_limiter, } - @pytest.mark.xfail(reason=("May not call a tool.")) - def test_tool_calling_with_no_arguments(self, model: BaseChatModel) -> None: - super().test_tool_calling_with_no_arguments(model) + @property + def tool_choice_value(self) -> Optional[str]: + """Value to use for tool choice when used in tests.""" + return "tool_name" @pytest.mark.xfail(reason="Not yet supported.") def test_usage_metadata_streaming(self, model: BaseChatModel) -> None: diff --git a/libs/standard-tests/langchain_standard_tests/integration_tests/chat_models.py b/libs/standard-tests/langchain_standard_tests/integration_tests/chat_models.py index bcb47a4c151a7..32d922a3e4a36 100644 --- a/libs/standard-tests/langchain_standard_tests/integration_tests/chat_models.py +++ b/libs/standard-tests/langchain_standard_tests/integration_tests/chat_models.py @@ -170,7 +170,11 @@ def test_stop_sequence(self, model: BaseChatModel) -> None: def test_tool_calling(self, model: BaseChatModel) -> None: if not self.has_tool_calling: pytest.skip("Test requires tool calling.") - model_with_tools = model.bind_tools([magic_function]) + if self.tool_choice_value == "tool_name": + tool_choice: Optional[str] = "magic_function" + else: + tool_choice = self.tool_choice_value + model_with_tools = model.bind_tools([magic_function], tool_choice=tool_choice) # Test invoke query = "What is the value of magic_function(3)? Use the tool." @@ -188,7 +192,13 @@ def test_tool_calling_with_no_arguments(self, model: BaseChatModel) -> None: if not self.has_tool_calling: pytest.skip("Test requires tool calling.") - model_with_tools = model.bind_tools([magic_function_no_args]) + if self.tool_choice_value == "tool_name": + tool_choice: Optional[str] = "magic_function_no_args" + else: + tool_choice = self.tool_choice_value + model_with_tools = model.bind_tools( + [magic_function_no_args], tool_choice=tool_choice + ) query = "What is the value of magic_function()? Use the tool." 
result = model_with_tools.invoke(query) _validate_tool_call_message_no_args(result) @@ -212,7 +222,11 @@ def test_bind_runnables_as_tools(self, model: BaseChatModel) -> None: name="greeting_generator", description="Generate a greeting in a particular style of speaking.", ) - model_with_tools = model.bind_tools([tool_]) + if self.tool_choice_value == "tool_name": + tool_choice: Optional[str] = "greeting_generator" + else: + tool_choice = self.tool_choice_value + model_with_tools = model.bind_tools([tool_], tool_choice=tool_choice) query = "Using the tool, generate a Pirate greeting." result = model_with_tools.invoke(query) assert isinstance(result, AIMessage) diff --git a/libs/standard-tests/langchain_standard_tests/unit_tests/chat_models.py b/libs/standard-tests/langchain_standard_tests/unit_tests/chat_models.py index ed73771dbdae0..6597b16177be4 100644 --- a/libs/standard-tests/langchain_standard_tests/unit_tests/chat_models.py +++ b/libs/standard-tests/langchain_standard_tests/unit_tests/chat_models.py @@ -96,6 +96,11 @@ def model(self) -> BaseChatModel: def has_tool_calling(self) -> bool: return self.chat_model_class.bind_tools is not BaseChatModel.bind_tools + @property + def tool_choice_value(self) -> Optional[str]: + """Value to use for tool choice when used in tests.""" + return None + @property def has_structured_output(self) -> bool: return ( From 6b98207edaefdae997dda521f8a2e2eed3646c80 Mon Sep 17 00:00:00 2001 From: Bagatur <22008038+baskaryan@users.noreply.github.com> Date: Mon, 19 Aug 2024 15:27:36 -0700 Subject: [PATCH 17/80] infra: test chat prompt ser/des (#25557) --- .../prompts/__snapshots__/test_chat.ambr | 864 ++++++++++++++++++ .../tests/unit_tests/prompts/test_chat.py | 59 +- 2 files changed, 918 insertions(+), 5 deletions(-) diff --git a/libs/core/tests/unit_tests/prompts/__snapshots__/test_chat.ambr b/libs/core/tests/unit_tests/prompts/__snapshots__/test_chat.ambr index 28ef4b9f0fa7a..901b35e75c719 100644 --- a/libs/core/tests/unit_tests/prompts/__snapshots__/test_chat.ambr +++ b/libs/core/tests/unit_tests/prompts/__snapshots__/test_chat.ambr @@ -1451,3 +1451,867 @@ 'type': 'constructor', }) # --- +# name: test_chat_tmpl_serdes + dict({ + 'graph': dict({ + 'edges': list([ + dict({ + 'source': 0, + 'target': 1, + }), + dict({ + 'source': 1, + 'target': 2, + }), + ]), + 'nodes': list([ + dict({ + 'data': 'PromptInput', + 'id': 0, + 'type': 'schema', + }), + dict({ + 'data': dict({ + 'id': list([ + 'langchain', + 'prompts', + 'chat', + 'ChatPromptTemplate', + ]), + 'name': 'ChatPromptTemplate', + }), + 'id': 1, + 'type': 'runnable', + }), + dict({ + 'data': 'ChatPromptTemplateOutput', + 'id': 2, + 'type': 'schema', + }), + ]), + }), + 'id': list([ + 'langchain', + 'prompts', + 'chat', + 'ChatPromptTemplate', + ]), + 'kwargs': dict({ + 'input_variables': list([ + 'foo', + 'more_history', + 'my_image', + 'my_other_image', + 'name', + ]), + 'messages': list([ + dict({ + 'id': list([ + 'langchain', + 'prompts', + 'chat', + 'SystemMessagePromptTemplate', + ]), + 'kwargs': dict({ + 'prompt': dict({ + 'graph': dict({ + 'edges': list([ + dict({ + 'source': 0, + 'target': 1, + }), + dict({ + 'source': 1, + 'target': 2, + }), + ]), + 'nodes': list([ + dict({ + 'data': 'PromptInput', + 'id': 0, + 'type': 'schema', + }), + dict({ + 'data': dict({ + 'id': list([ + 'langchain', + 'prompts', + 'prompt', + 'PromptTemplate', + ]), + 'name': 'PromptTemplate', + }), + 'id': 1, + 'type': 'runnable', + }), + dict({ + 'data': 'PromptTemplateOutput', + 'id': 2, + 'type': 'schema', + }), + 
]), + }), + 'id': list([ + 'langchain', + 'prompts', + 'prompt', + 'PromptTemplate', + ]), + 'kwargs': dict({ + 'input_variables': list([ + 'name', + ]), + 'template': 'You are an AI assistant named {name}.', + 'template_format': 'f-string', + }), + 'lc': 1, + 'name': 'PromptTemplate', + 'type': 'constructor', + }), + }), + 'lc': 1, + 'type': 'constructor', + }), + dict({ + 'id': list([ + 'langchain', + 'prompts', + 'chat', + 'SystemMessagePromptTemplate', + ]), + 'kwargs': dict({ + 'prompt': list([ + dict({ + 'graph': dict({ + 'edges': list([ + dict({ + 'source': 0, + 'target': 1, + }), + dict({ + 'source': 1, + 'target': 2, + }), + ]), + 'nodes': list([ + dict({ + 'data': 'PromptInput', + 'id': 0, + 'type': 'schema', + }), + dict({ + 'data': dict({ + 'id': list([ + 'langchain', + 'prompts', + 'prompt', + 'PromptTemplate', + ]), + 'name': 'PromptTemplate', + }), + 'id': 1, + 'type': 'runnable', + }), + dict({ + 'data': 'PromptTemplateOutput', + 'id': 2, + 'type': 'schema', + }), + ]), + }), + 'id': list([ + 'langchain', + 'prompts', + 'prompt', + 'PromptTemplate', + ]), + 'kwargs': dict({ + 'input_variables': list([ + 'name', + ]), + 'template': 'You are an AI assistant named {name}.', + 'template_format': 'f-string', + }), + 'lc': 1, + 'name': 'PromptTemplate', + 'type': 'constructor', + }), + ]), + }), + 'lc': 1, + 'type': 'constructor', + }), + dict({ + 'id': list([ + 'langchain', + 'prompts', + 'chat', + 'SystemMessagePromptTemplate', + ]), + 'kwargs': dict({ + 'prompt': dict({ + 'graph': dict({ + 'edges': list([ + dict({ + 'source': 0, + 'target': 1, + }), + dict({ + 'source': 1, + 'target': 2, + }), + ]), + 'nodes': list([ + dict({ + 'data': 'PromptInput', + 'id': 0, + 'type': 'schema', + }), + dict({ + 'data': dict({ + 'id': list([ + 'langchain', + 'prompts', + 'prompt', + 'PromptTemplate', + ]), + 'name': 'PromptTemplate', + }), + 'id': 1, + 'type': 'runnable', + }), + dict({ + 'data': 'PromptTemplateOutput', + 'id': 2, + 'type': 'schema', + }), + ]), + }), + 'id': list([ + 'langchain', + 'prompts', + 'prompt', + 'PromptTemplate', + ]), + 'kwargs': dict({ + 'input_variables': list([ + 'foo', + ]), + 'template': 'you are {foo}', + 'template_format': 'f-string', + }), + 'lc': 1, + 'name': 'PromptTemplate', + 'type': 'constructor', + }), + }), + 'lc': 1, + 'type': 'constructor', + }), + dict({ + 'id': list([ + 'langchain', + 'prompts', + 'chat', + 'HumanMessagePromptTemplate', + ]), + 'kwargs': dict({ + 'prompt': list([ + dict({ + 'graph': dict({ + 'edges': list([ + dict({ + 'source': 0, + 'target': 1, + }), + dict({ + 'source': 1, + 'target': 2, + }), + ]), + 'nodes': list([ + dict({ + 'data': 'PromptInput', + 'id': 0, + 'type': 'schema', + }), + dict({ + 'data': dict({ + 'id': list([ + 'langchain', + 'prompts', + 'prompt', + 'PromptTemplate', + ]), + 'name': 'PromptTemplate', + }), + 'id': 1, + 'type': 'runnable', + }), + dict({ + 'data': 'PromptTemplateOutput', + 'id': 2, + 'type': 'schema', + }), + ]), + }), + 'id': list([ + 'langchain', + 'prompts', + 'prompt', + 'PromptTemplate', + ]), + 'kwargs': dict({ + 'input_variables': list([ + ]), + 'template': 'hello', + 'template_format': 'f-string', + }), + 'lc': 1, + 'name': 'PromptTemplate', + 'type': 'constructor', + }), + dict({ + 'graph': dict({ + 'edges': list([ + dict({ + 'source': 0, + 'target': 1, + }), + dict({ + 'source': 1, + 'target': 2, + }), + ]), + 'nodes': list([ + dict({ + 'data': 'PromptInput', + 'id': 0, + 'type': 'schema', + }), + dict({ + 'data': dict({ + 'id': list([ + 'langchain', + 'prompts', + 'prompt', + 
'PromptTemplate', + ]), + 'name': 'PromptTemplate', + }), + 'id': 1, + 'type': 'runnable', + }), + dict({ + 'data': 'PromptTemplateOutput', + 'id': 2, + 'type': 'schema', + }), + ]), + }), + 'id': list([ + 'langchain', + 'prompts', + 'prompt', + 'PromptTemplate', + ]), + 'kwargs': dict({ + 'input_variables': list([ + ]), + 'template': "What's in this image?", + 'template_format': 'f-string', + }), + 'lc': 1, + 'name': 'PromptTemplate', + 'type': 'constructor', + }), + dict({ + 'graph': dict({ + 'edges': list([ + dict({ + 'source': 0, + 'target': 1, + }), + dict({ + 'source': 1, + 'target': 2, + }), + ]), + 'nodes': list([ + dict({ + 'data': 'PromptInput', + 'id': 0, + 'type': 'schema', + }), + dict({ + 'data': dict({ + 'id': list([ + 'langchain', + 'prompts', + 'prompt', + 'PromptTemplate', + ]), + 'name': 'PromptTemplate', + }), + 'id': 1, + 'type': 'runnable', + }), + dict({ + 'data': 'PromptTemplateOutput', + 'id': 2, + 'type': 'schema', + }), + ]), + }), + 'id': list([ + 'langchain', + 'prompts', + 'prompt', + 'PromptTemplate', + ]), + 'kwargs': dict({ + 'input_variables': list([ + ]), + 'template': "What's in this image?", + 'template_format': 'f-string', + }), + 'lc': 1, + 'name': 'PromptTemplate', + 'type': 'constructor', + }), + dict({ + 'graph': dict({ + 'edges': list([ + dict({ + 'source': 0, + 'target': 1, + }), + dict({ + 'source': 1, + 'target': 2, + }), + ]), + 'nodes': list([ + dict({ + 'data': 'PromptInput', + 'id': 0, + 'type': 'schema', + }), + dict({ + 'data': dict({ + 'id': list([ + 'langchain', + 'prompts', + 'image', + 'ImagePromptTemplate', + ]), + 'name': 'ImagePromptTemplate', + }), + 'id': 1, + 'type': 'runnable', + }), + dict({ + 'data': 'ImagePromptTemplateOutput', + 'id': 2, + 'type': 'schema', + }), + ]), + }), + 'id': list([ + 'langchain', + 'prompts', + 'image', + 'ImagePromptTemplate', + ]), + 'kwargs': dict({ + 'input_variables': list([ + 'my_image', + ]), + 'template': dict({ + 'url': 'data:image/jpeg;base64,{my_image}', + }), + }), + 'lc': 1, + 'name': 'ImagePromptTemplate', + 'type': 'constructor', + }), + dict({ + 'graph': dict({ + 'edges': list([ + dict({ + 'source': 0, + 'target': 1, + }), + dict({ + 'source': 1, + 'target': 2, + }), + ]), + 'nodes': list([ + dict({ + 'data': 'PromptInput', + 'id': 0, + 'type': 'schema', + }), + dict({ + 'data': dict({ + 'id': list([ + 'langchain', + 'prompts', + 'image', + 'ImagePromptTemplate', + ]), + 'name': 'ImagePromptTemplate', + }), + 'id': 1, + 'type': 'runnable', + }), + dict({ + 'data': 'ImagePromptTemplateOutput', + 'id': 2, + 'type': 'schema', + }), + ]), + }), + 'id': list([ + 'langchain', + 'prompts', + 'image', + 'ImagePromptTemplate', + ]), + 'kwargs': dict({ + 'input_variables': list([ + 'my_image', + ]), + 'template': dict({ + 'url': 'data:image/jpeg;base64,{my_image}', + }), + }), + 'lc': 1, + 'name': 'ImagePromptTemplate', + 'type': 'constructor', + }), + dict({ + 'graph': dict({ + 'edges': list([ + dict({ + 'source': 0, + 'target': 1, + }), + dict({ + 'source': 1, + 'target': 2, + }), + ]), + 'nodes': list([ + dict({ + 'data': 'PromptInput', + 'id': 0, + 'type': 'schema', + }), + dict({ + 'data': dict({ + 'id': list([ + 'langchain', + 'prompts', + 'image', + 'ImagePromptTemplate', + ]), + 'name': 'ImagePromptTemplate', + }), + 'id': 1, + 'type': 'runnable', + }), + dict({ + 'data': 'ImagePromptTemplateOutput', + 'id': 2, + 'type': 'schema', + }), + ]), + }), + 'id': list([ + 'langchain', + 'prompts', + 'image', + 'ImagePromptTemplate', + ]), + 'kwargs': dict({ + 'input_variables': list([ + 
'my_other_image', + ]), + 'template': dict({ + 'url': '{my_other_image}', + }), + }), + 'lc': 1, + 'name': 'ImagePromptTemplate', + 'type': 'constructor', + }), + dict({ + 'graph': dict({ + 'edges': list([ + dict({ + 'source': 0, + 'target': 1, + }), + dict({ + 'source': 1, + 'target': 2, + }), + ]), + 'nodes': list([ + dict({ + 'data': 'PromptInput', + 'id': 0, + 'type': 'schema', + }), + dict({ + 'data': dict({ + 'id': list([ + 'langchain', + 'prompts', + 'image', + 'ImagePromptTemplate', + ]), + 'name': 'ImagePromptTemplate', + }), + 'id': 1, + 'type': 'runnable', + }), + dict({ + 'data': 'ImagePromptTemplateOutput', + 'id': 2, + 'type': 'schema', + }), + ]), + }), + 'id': list([ + 'langchain', + 'prompts', + 'image', + 'ImagePromptTemplate', + ]), + 'kwargs': dict({ + 'input_variables': list([ + 'my_other_image', + ]), + 'template': dict({ + 'detail': 'medium', + 'url': '{my_other_image}', + }), + }), + 'lc': 1, + 'name': 'ImagePromptTemplate', + 'type': 'constructor', + }), + dict({ + 'graph': dict({ + 'edges': list([ + dict({ + 'source': 0, + 'target': 1, + }), + dict({ + 'source': 1, + 'target': 2, + }), + ]), + 'nodes': list([ + dict({ + 'data': 'PromptInput', + 'id': 0, + 'type': 'schema', + }), + dict({ + 'data': dict({ + 'id': list([ + 'langchain', + 'prompts', + 'image', + 'ImagePromptTemplate', + ]), + 'name': 'ImagePromptTemplate', + }), + 'id': 1, + 'type': 'runnable', + }), + dict({ + 'data': 'ImagePromptTemplateOutput', + 'id': 2, + 'type': 'schema', + }), + ]), + }), + 'id': list([ + 'langchain', + 'prompts', + 'image', + 'ImagePromptTemplate', + ]), + 'kwargs': dict({ + 'input_variables': list([ + ]), + 'template': dict({ + 'url': 'https://www.langchain.com/image.png', + }), + }), + 'lc': 1, + 'name': 'ImagePromptTemplate', + 'type': 'constructor', + }), + dict({ + 'graph': dict({ + 'edges': list([ + dict({ + 'source': 0, + 'target': 1, + }), + dict({ + 'source': 1, + 'target': 2, + }), + ]), + 'nodes': list([ + dict({ + 'data': 'PromptInput', + 'id': 0, + 'type': 'schema', + }), + dict({ + 'data': dict({ + 'id': list([ + 'langchain', + 'prompts', + 'image', + 'ImagePromptTemplate', + ]), + 'name': 'ImagePromptTemplate', + }), + 'id': 1, + 'type': 'runnable', + }), + dict({ + 'data': 'ImagePromptTemplateOutput', + 'id': 2, + 'type': 'schema', + }), + ]), + }), + 'id': list([ + 'langchain', + 'prompts', + 'image', + 'ImagePromptTemplate', + ]), + 'kwargs': dict({ + 'input_variables': list([ + ]), + 'template': dict({ + 'url': 'data:image/jpeg;base64,foobar', + }), + }), + 'lc': 1, + 'name': 'ImagePromptTemplate', + 'type': 'constructor', + }), + dict({ + 'graph': dict({ + 'edges': list([ + dict({ + 'source': 0, + 'target': 1, + }), + dict({ + 'source': 1, + 'target': 2, + }), + ]), + 'nodes': list([ + dict({ + 'data': 'PromptInput', + 'id': 0, + 'type': 'schema', + }), + dict({ + 'data': dict({ + 'id': list([ + 'langchain', + 'prompts', + 'image', + 'ImagePromptTemplate', + ]), + 'name': 'ImagePromptTemplate', + }), + 'id': 1, + 'type': 'runnable', + }), + dict({ + 'data': 'ImagePromptTemplateOutput', + 'id': 2, + 'type': 'schema', + }), + ]), + }), + 'id': list([ + 'langchain', + 'prompts', + 'image', + 'ImagePromptTemplate', + ]), + 'kwargs': dict({ + 'input_variables': list([ + ]), + 'template': dict({ + 'url': 'data:image/jpeg;base64,foobar', + }), + }), + 'lc': 1, + 'name': 'ImagePromptTemplate', + 'type': 'constructor', + }), + ]), + }), + 'lc': 1, + 'type': 'constructor', + }), + dict({ + 'id': list([ + 'langchain', + 'prompts', + 'chat', + 'MessagesPlaceholder', + 
]), + 'kwargs': dict({ + 'optional': True, + 'variable_name': 'chat_history', + }), + 'lc': 1, + 'type': 'constructor', + }), + dict({ + 'id': list([ + 'langchain', + 'prompts', + 'chat', + 'MessagesPlaceholder', + ]), + 'kwargs': dict({ + 'variable_name': 'more_history', + }), + 'lc': 1, + 'type': 'constructor', + }), + ]), + 'optional_variables': list([ + 'chat_history', + ]), + 'partial_variables': dict({ + 'chat_history': list([ + ]), + }), + }), + 'lc': 1, + 'name': 'ChatPromptTemplate', + 'type': 'constructor', + }) +# --- diff --git a/libs/core/tests/unit_tests/prompts/test_chat.py b/libs/core/tests/unit_tests/prompts/test_chat.py index d6e383788395b..a280ff1dbb670 100644 --- a/libs/core/tests/unit_tests/prompts/test_chat.py +++ b/libs/core/tests/unit_tests/prompts/test_chat.py @@ -1,7 +1,7 @@ import base64 import tempfile from pathlib import Path -from typing import Any, List, Union +from typing import Any, List, Tuple, Union, cast import pytest from syrupy import SnapshotAssertion @@ -565,7 +565,7 @@ async def test_chat_tmpl_from_messages_multipart_text_with_template() -> None: async def test_chat_tmpl_from_messages_multipart_image() -> None: """Test multipart image URL formatting.""" base64_image = "iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAA" - other_base64_image = "iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAA" + other_base64_image = "other_iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAA" template = ChatPromptTemplate.from_messages( [ ("system", "You are an AI assistant named {name}."), @@ -609,9 +609,7 @@ async def test_chat_tmpl_from_messages_multipart_image() -> None: }, { "type": "image_url", - "image_url": { - "url": f"data:image/jpeg;base64,{other_base64_image}" - }, + "image_url": {"url": f"data:image/jpeg;base64,{base64_image}"}, }, { "type": "image_url", @@ -814,3 +812,54 @@ def test_chat_prompt_w_msgs_placeholder_ser_des(snapshot: SnapshotAssertion) -> assert load(dumpd(MessagesPlaceholder("bar"))) == MessagesPlaceholder("bar") assert dumpd(prompt) == snapshot(name="chat_prompt") assert load(dumpd(prompt)) == prompt + + +async def test_chat_tmpl_serdes(snapshot: SnapshotAssertion) -> None: + """Test chat prompt template ser/des.""" + template = ChatPromptTemplate( + [ + ("system", "You are an AI assistant named {name}."), + ("system", [{"text": "You are an AI assistant named {name}."}]), + SystemMessagePromptTemplate.from_template("you are {foo}"), + cast( + Tuple, + ( + "human", + [ + "hello", + {"text": "What's in this image?"}, + {"type": "text", "text": "What's in this image?"}, + { + "type": "image_url", + "image_url": "data:image/jpeg;base64,{my_image}", + }, + { + "type": "image_url", + "image_url": {"url": "data:image/jpeg;base64,{my_image}"}, + }, + {"type": "image_url", "image_url": "{my_other_image}"}, + { + "type": "image_url", + "image_url": { + "url": "{my_other_image}", + "detail": "medium", + }, + }, + { + "type": "image_url", + "image_url": {"url": "https://www.langchain.com/image.png"}, + }, + { + "type": "image_url", + "image_url": {"url": "data:image/jpeg;base64,foobar"}, + }, + {"image_url": {"url": "data:image/jpeg;base64,foobar"}}, + ], + ), + ), + ("placeholder", "{chat_history}"), + MessagesPlaceholder("more_history", optional=False), + ] + ) + assert dumpd(template) == snapshot() + assert load(dumpd(template)) == template From dd2d094adc4506f31adb282baea8c25bc93015a6 Mon Sep 17 00:00:00 2001 From: Erick Friis Date: Mon, 19 Aug 2024 15:48:26 -0700 Subject: [PATCH 18/80] infra: remove huggingface from ci tree (#25559) --- 
.github/scripts/check_diff.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/.github/scripts/check_diff.py b/.github/scripts/check_diff.py index f415db104e402..4ee411a00a1cd 100644 --- a/.github/scripts/check_diff.py +++ b/.github/scripts/check_diff.py @@ -68,6 +68,13 @@ def dependents_graph() -> dict: if "langchain" in dep: dependents[dep].add(pkg_dir) + + # remove huggingface from dependents because of CI instability + # specifically in huggingface jobs + # https://github.com/langchain-ai/langchain/issues/25558 + for k in dependents: + if "libs/partners/huggingface" in dependents[k]: + dependents[k].remove("libs/partners/huggingface") return dependents From e01c6789c47842475cd074f87a9263674ec37d58 Mon Sep 17 00:00:00 2001 From: Erick Friis Date: Mon, 19 Aug 2024 17:29:09 -0700 Subject: [PATCH 19/80] core,community: add beta decorator to missed GraphVectorStore extensions (#25562) --- .../langchain_community/graph_vectorstores/cassandra.py | 2 ++ .../graph_vectorstores/extractors/gliner_link_extractor.py | 2 ++ .../extractors/hierarchy_link_extractor.py | 2 ++ .../graph_vectorstores/extractors/html_link_extractor.py | 2 ++ .../graph_vectorstores/extractors/keybert_link_extractor.py | 2 ++ .../graph_vectorstores/extractors/link_extractor.py | 2 ++ .../graph_vectorstores/extractors/link_extractor_adapter.py | 2 ++ .../extractors/link_extractor_transformer.py | 2 ++ libs/core/langchain_core/graph_vectorstores/base.py | 2 ++ libs/core/langchain_core/graph_vectorstores/links.py | 5 +++++ 10 files changed, 23 insertions(+) diff --git a/libs/community/langchain_community/graph_vectorstores/cassandra.py b/libs/community/langchain_community/graph_vectorstores/cassandra.py index da9b09e993ee1..6fb04c60a13e9 100644 --- a/libs/community/langchain_community/graph_vectorstores/cassandra.py +++ b/libs/community/langchain_community/graph_vectorstores/cassandra.py @@ -9,6 +9,7 @@ Type, ) +from langchain_core._api import beta from langchain_core.documents import Document from langchain_core.embeddings import Embeddings from langchain_core.graph_vectorstores.base import ( @@ -23,6 +24,7 @@ from cassandra.cluster import Session +@beta() class CassandraGraphVectorStore(GraphVectorStore): def __init__( self, diff --git a/libs/community/langchain_community/graph_vectorstores/extractors/gliner_link_extractor.py b/libs/community/langchain_community/graph_vectorstores/extractors/gliner_link_extractor.py index f5aa2eb27aeba..a485f849c7a6c 100644 --- a/libs/community/langchain_community/graph_vectorstores/extractors/gliner_link_extractor.py +++ b/libs/community/langchain_community/graph_vectorstores/extractors/gliner_link_extractor.py @@ -1,5 +1,6 @@ from typing import Any, Dict, Iterable, List, Optional, Set, Union +from langchain_core._api import beta from langchain_core.documents import Document from langchain_core.graph_vectorstores.links import Link @@ -11,6 +12,7 @@ GLiNERInput = Union[str, Document] +@beta() class GLiNERLinkExtractor(LinkExtractor[GLiNERInput]): """Link documents with common named entities using GLiNER .""" diff --git a/libs/community/langchain_community/graph_vectorstores/extractors/hierarchy_link_extractor.py b/libs/community/langchain_community/graph_vectorstores/extractors/hierarchy_link_extractor.py index 7a324df261461..525445f0157d6 100644 --- a/libs/community/langchain_community/graph_vectorstores/extractors/hierarchy_link_extractor.py +++ b/libs/community/langchain_community/graph_vectorstores/extractors/hierarchy_link_extractor.py @@ -1,5 +1,6 @@ from typing import 
Callable, List, Set +from langchain_core._api import beta from langchain_core.documents import Document from langchain_core.graph_vectorstores.links import Link @@ -18,6 +19,7 @@ _SIBLING: str = "s:" +@beta() class HierarchyLinkExtractor(LinkExtractor[HierarchyInput]): def __init__( self, diff --git a/libs/community/langchain_community/graph_vectorstores/extractors/html_link_extractor.py b/libs/community/langchain_community/graph_vectorstores/extractors/html_link_extractor.py index 49ff1703a3a03..bc820958473fd 100644 --- a/libs/community/langchain_community/graph_vectorstores/extractors/html_link_extractor.py +++ b/libs/community/langchain_community/graph_vectorstores/extractors/html_link_extractor.py @@ -4,6 +4,7 @@ from typing import TYPE_CHECKING, List, Optional, Set, Union from urllib.parse import urldefrag, urljoin, urlparse +from langchain_core._api import beta from langchain_core.documents import Document from langchain_core.graph_vectorstores import Link @@ -61,6 +62,7 @@ class HtmlInput: base_url: str +@beta() class HtmlLinkExtractor(LinkExtractor[HtmlInput]): def __init__(self, *, kind: str = "hyperlink", drop_fragments: bool = True): """Extract hyperlinks from HTML content. diff --git a/libs/community/langchain_community/graph_vectorstores/extractors/keybert_link_extractor.py b/libs/community/langchain_community/graph_vectorstores/extractors/keybert_link_extractor.py index aee7898a5c125..54be34d46319f 100644 --- a/libs/community/langchain_community/graph_vectorstores/extractors/keybert_link_extractor.py +++ b/libs/community/langchain_community/graph_vectorstores/extractors/keybert_link_extractor.py @@ -1,5 +1,6 @@ from typing import Any, Dict, Iterable, Optional, Set, Union +from langchain_core._api import beta from langchain_core.documents import Document from langchain_core.graph_vectorstores.links import Link @@ -10,6 +11,7 @@ KeybertInput = Union[str, Document] +@beta() class KeybertLinkExtractor(LinkExtractor[KeybertInput]): def __init__( self, diff --git a/libs/community/langchain_community/graph_vectorstores/extractors/link_extractor.py b/libs/community/langchain_community/graph_vectorstores/extractors/link_extractor.py index 619ba2a6d1366..45b8a526f9112 100644 --- a/libs/community/langchain_community/graph_vectorstores/extractors/link_extractor.py +++ b/libs/community/langchain_community/graph_vectorstores/extractors/link_extractor.py @@ -3,6 +3,7 @@ from abc import ABC, abstractmethod from typing import Generic, Iterable, Set, TypeVar +from langchain_core._api import beta from langchain_core.graph_vectorstores import Link InputT = TypeVar("InputT") @@ -10,6 +11,7 @@ METADATA_LINKS_KEY = "links" +@beta() class LinkExtractor(ABC, Generic[InputT]): """Interface for extracting links (incoming, outgoing, bidirectional).""" diff --git a/libs/community/langchain_community/graph_vectorstores/extractors/link_extractor_adapter.py b/libs/community/langchain_community/graph_vectorstores/extractors/link_extractor_adapter.py index 19af73bab980a..68dfbeb7cbf43 100644 --- a/libs/community/langchain_community/graph_vectorstores/extractors/link_extractor_adapter.py +++ b/libs/community/langchain_community/graph_vectorstores/extractors/link_extractor_adapter.py @@ -1,5 +1,6 @@ from typing import Callable, Iterable, Set, TypeVar +from langchain_core._api import beta from langchain_core.graph_vectorstores import Link from langchain_community.graph_vectorstores.extractors.link_extractor import ( @@ -10,6 +11,7 @@ UnderlyingInputT = TypeVar("UnderlyingInputT") +@beta() class 
LinkExtractorAdapter(LinkExtractor[InputT]): def __init__( self, diff --git a/libs/community/langchain_community/graph_vectorstores/extractors/link_extractor_transformer.py b/libs/community/langchain_community/graph_vectorstores/extractors/link_extractor_transformer.py index 3a5b4e83a52df..752b4b4986168 100644 --- a/libs/community/langchain_community/graph_vectorstores/extractors/link_extractor_transformer.py +++ b/libs/community/langchain_community/graph_vectorstores/extractors/link_extractor_transformer.py @@ -1,5 +1,6 @@ from typing import Any, Sequence +from langchain_core._api import beta from langchain_core.documents import Document from langchain_core.documents.transformers import BaseDocumentTransformer from langchain_core.graph_vectorstores.links import copy_with_links @@ -9,6 +10,7 @@ ) +@beta() class LinkExtractorTransformer(BaseDocumentTransformer): """DocumentTransformer for applying one or more LinkExtractors. diff --git a/libs/core/langchain_core/graph_vectorstores/base.py b/libs/core/langchain_core/graph_vectorstores/base.py index 2235dd054fda0..97eec8d5cb496 100644 --- a/libs/core/langchain_core/graph_vectorstores/base.py +++ b/libs/core/langchain_core/graph_vectorstores/base.py @@ -32,6 +32,7 @@ def _has_next(iterator: Iterator) -> bool: return next(iterator, sentinel) is not sentinel +@beta() class Node(Serializable): """Node in the GraphVectorStore. @@ -115,6 +116,7 @@ def _documents_to_nodes(documents: Iterable[Document]) -> Iterator[Node]: ) +@beta() def nodes_to_documents(nodes: Iterable[Node]) -> Iterator[Document]: for node in nodes: metadata = node.metadata.copy() diff --git a/libs/core/langchain_core/graph_vectorstores/links.py b/libs/core/langchain_core/graph_vectorstores/links.py index 11c95986ee9db..1e2353b0b4bb6 100644 --- a/libs/core/langchain_core/graph_vectorstores/links.py +++ b/libs/core/langchain_core/graph_vectorstores/links.py @@ -1,9 +1,11 @@ from dataclasses import dataclass from typing import Iterable, List, Literal, Union +from langchain_core._api import beta from langchain_core.documents import Document +@beta() @dataclass(frozen=True) class Link: """A link to/from a tag of a given tag. @@ -38,6 +40,7 @@ def bidir(kind: str, tag: str) -> "Link": METADATA_LINKS_KEY = "links" +@beta() def get_links(doc: Document) -> List[Link]: """Get the links from a document. Args: @@ -54,6 +57,7 @@ def get_links(doc: Document) -> List[Link]: return links +@beta() def add_links(doc: Document, *links: Union[Link, Iterable[Link]]) -> None: """Add links to the given metadata. Args: @@ -68,6 +72,7 @@ def add_links(doc: Document, *links: Union[Link, Iterable[Link]]) -> None: links_in_metadata.append(link) +@beta() def copy_with_links(doc: Document, *links: Union[Link, Iterable[Link]]) -> Document: """Return a document with the given links added. 
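The link helpers annotated with `@beta()` in the diff above are small metadata utilities on `Document` objects. A minimal usage sketch, not part of the patch, assuming `langchain-core` at this revision (the import path and signatures follow the files shown in the diff; the page content and tags are purely illustrative):

```python
from langchain_core.documents import Document
from langchain_core.graph_vectorstores.links import (
    Link,
    add_links,
    copy_with_links,
    get_links,
)

doc = Document(page_content="GraphVectorStore notes", metadata={})

# Link.bidir builds a bidirectional link for a given kind/tag pair.
keyword = Link.bidir(kind="keyword", tag="graph")

# add_links mutates the document's metadata in place ...
add_links(doc, keyword)
assert get_links(doc) == [keyword]

# ... while copy_with_links returns a new Document with the extra links added,
# leaving the original document unchanged.
related = copy_with_links(doc, Link.bidir(kind="keyword", tag="vectorstore"))
```

Since these helpers are now marked `@beta()`, using them may emit a beta-API warning; their call signatures are unchanged by this patch.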
From 4bd005adb69e5bc8b85a1e1504ba1d2b01316bf9 Mon Sep 17 00:00:00 2001 From: Bagatur <22008038+baskaryan@users.noreply.github.com> Date: Tue, 20 Aug 2024 00:21:22 -0700 Subject: [PATCH 20/80] core[patch]: Allow bound models as token_counter in trim_messages (#25563) --- libs/core/langchain_core/messages/utils.py | 30 ++++++++++++------- .../tests/unit_tests/messages/test_utils.py | 19 ++++++++++++ 2 files changed, 38 insertions(+), 11 deletions(-) diff --git a/libs/core/langchain_core/messages/utils.py b/libs/core/langchain_core/messages/utils.py index b22c927ad4d06..18225c53228df 100644 --- a/libs/core/langchain_core/messages/utils.py +++ b/libs/core/langchain_core/messages/utils.py @@ -514,6 +514,8 @@ def merge_message_runs( return merged +# TODO: Update so validation errors (for token_counter, for example) are raised on +# init not at runtime. @_runnable_support def trim_messages( messages: Union[Iterable[MessageLikeRepresentation], PromptValue], @@ -759,24 +761,30 @@ def dummy_token_counter(messages: List[BaseMessage]) -> int: AIMessage("This is a 4 token text. The full message is 10 tokens.", id="fourth"), ] """ # noqa: E501 - from langchain_core.language_models import BaseLanguageModel if start_on and strategy == "first": raise ValueError if include_system and strategy == "first": raise ValueError messages = convert_to_messages(messages) - if isinstance(token_counter, BaseLanguageModel): - list_token_counter = token_counter.get_num_tokens_from_messages - elif ( - list(inspect.signature(token_counter).parameters.values())[0].annotation - is BaseMessage - ): - - def list_token_counter(messages: Sequence[BaseMessage]) -> int: - return sum(token_counter(msg) for msg in messages) # type: ignore[arg-type, misc] + if hasattr(token_counter, "get_num_tokens_from_messages"): + list_token_counter = getattr(token_counter, "get_num_tokens_from_messages") + elif callable(token_counter): + if ( + list(inspect.signature(token_counter).parameters.values())[0].annotation + is BaseMessage + ): + + def list_token_counter(messages: Sequence[BaseMessage]) -> int: + return sum(token_counter(msg) for msg in messages) # type: ignore[arg-type, misc] + else: + list_token_counter = token_counter # type: ignore[assignment] else: - list_token_counter = token_counter # type: ignore[assignment] + raise ValueError( + f"'token_counter' expected to be a model that implements " + f"'get_num_tokens_from_messages()' or a function. Received object of type " + f"{type(token_counter)}."
+ ) try: from langchain_text_splitters import TextSplitter diff --git a/libs/core/tests/unit_tests/messages/test_utils.py b/libs/core/tests/unit_tests/messages/test_utils.py index d3160cb47ed02..142272292f609 100644 --- a/libs/core/tests/unit_tests/messages/test_utils.py +++ b/libs/core/tests/unit_tests/messages/test_utils.py @@ -2,6 +2,7 @@ import pytest +from langchain_core.language_models.fake_chat_models import FakeChatModel from langchain_core.messages import ( AIMessage, BaseMessage, @@ -316,6 +317,19 @@ def test_trim_messages_invoke() -> None: assert actual == expected +def test_trim_messages_bound_model_token_counter() -> None: + trimmer = trim_messages( + max_tokens=10, token_counter=FakeTokenCountingModel().bind(foo="bar") + ) + trimmer.invoke([HumanMessage("foobar")]) + + +def test_trim_messages_bad_token_counter() -> None: + trimmer = trim_messages(max_tokens=10, token_counter={}) + with pytest.raises(ValueError): + trimmer.invoke([HumanMessage("foobar")]) + + def dummy_token_counter(messages: List[BaseMessage]) -> int: # treat each message like it adds 3 default tokens at the beginning # of the message and at the end of the message. 3 + 4 + 3 = 10 tokens @@ -338,3 +352,8 @@ def dummy_token_counter(messages: List[BaseMessage]) -> int: + default_msg_suffix_len ) return count + + +class FakeTokenCountingModel(FakeChatModel): + def get_num_tokens_from_messages(self, messages: List[BaseMessage]) -> int: + return dummy_token_counter(messages) From d324fd1821566ad68d653c0e4e36ad4b43d54630 Mon Sep 17 00:00:00 2001 From: Leonid Ganeline Date: Tue, 20 Aug 2024 05:00:58 -0700 Subject: [PATCH 21/80] docs: added Constitutional AI references (#25553) Added reference to the source paper. --- .../docs/versions/migrating_chains/constitutional_chain.ipynb | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/docs/versions/migrating_chains/constitutional_chain.ipynb b/docs/docs/versions/migrating_chains/constitutional_chain.ipynb index c3729b67c5ae7..771145547cdf7 100644 --- a/docs/docs/versions/migrating_chains/constitutional_chain.ipynb +++ b/docs/docs/versions/migrating_chains/constitutional_chain.ipynb @@ -17,6 +17,8 @@ "source": [ "[ConstitutionalChain](https://api.python.langchain.com/en/latest/chains/langchain.chains.constitutional_ai.base.ConstitutionalChain.html) allowed for a LLM to critique and revise generations based on [principles](https://api.python.langchain.com/en/latest/chains/langchain.chains.constitutional_ai.models.ConstitutionalPrinciple.html), structured as combinations of critique and revision requests. For example, a principle might include a request to identify harmful content, and a request to rewrite the content.\n", "\n", + "`Constitutional AI principles` are based on the [Constitutional AI: Harmlessness from AI Feedback](https://arxiv.org/pdf/2212.08073) paper.\n", + "\n", "In `ConstitutionalChain`, this structure of critique requests and associated revisions was formatted into a LLM prompt and parsed out of string responses. This is more naturally achieved via [structured output](/docs/how_to/structured_output/) features of chat models. We can construct a simple chain in [LangGraph](https://langchain-ai.github.io/langgraph/) for this purpose. 
Some advantages of this approach include:\n", "\n", "- Leverage tool-calling capabilities of chat models that have been fine-tuned for this purpose;\n", @@ -324,7 +326,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.4" + "version": "3.10.12" } }, "nbformat": 4, From 498a482e76545ecdf79a71c9703bc018621bcafd Mon Sep 17 00:00:00 2001 From: Abraham Omorogbe <9068012+AbeOmor@users.noreply.github.com> Date: Tue, 20 Aug 2024 05:01:32 -0700 Subject: [PATCH 22/80] docs: Adding Azure Database for PostgreSQL docs (#25560) This PR to show support for the Azure Database for PostgreSQL Vector Store and Memory [Azure Database for PostgreSQL - Flexible Server](https://learn.microsoft.com/en-us/azure/postgresql/flexible-server/service-overview) [Azure Database for PostgreSQL pgvector extension](https://learn.microsoft.com/en-us/azure/postgresql/flexible-server/how-to-use-pgvector) **Description:** Added vector store and memory usage documentation for Azure Database for PostgreSQL **Twitter handle:** [@_aiabe](https://x.com/_aiabe) --------- Co-authored-by: Abeomor <{ID}+{username}@users.noreply.github.com> --- .../docs/integrations/platforms/microsoft.mdx | 22 ++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/docs/docs/integrations/platforms/microsoft.mdx b/docs/docs/integrations/platforms/microsoft.mdx index 180b021963e03..a0d6807ed7793 100644 --- a/docs/docs/integrations/platforms/microsoft.mdx +++ b/docs/docs/integrations/platforms/microsoft.mdx @@ -261,6 +261,7 @@ from langchain_community.document_loaders.onenote import OneNoteLoader [AI agent](https://learn.microsoft.com/en-us/azure/cosmos-db/ai-agents) needs robust memory systems that support multi-modality, offer strong operational performance, and enable agent memory sharing as well as separation. +### Azure Cosmos DB AI agents can rely on Azure Cosmos DB as a unified [memory system](https://learn.microsoft.com/en-us/azure/cosmos-db/ai-agents#memory-can-make-or-break-agents) solution, enjoying speed, scale, and simplicity. This service successfully [enabled OpenAI's ChatGPT service](https://www.youtube.com/watch?v=6IIUtEFKJec&t) to scale dynamically with high reliability and low maintenance. Powered by an atom-record-sequence engine, it is the world's first globally distributed [NoSQL](https://learn.microsoft.com/en-us/azure/cosmos-db/distributed-nosql), [relational](https://learn.microsoft.com/en-us/azure/cosmos-db/distributed-relational), and [vector database](https://learn.microsoft.com/en-us/azure/cosmos-db/vector-database) service that offers a serverless mode. Below are two available Azure Cosmos DB APIs that can provide vector store functionalities. @@ -327,6 +328,15 @@ See a [usage example](/docs/integrations/vectorstores/azure_cosmos_db_no_sql). from langchain_community.vectorstores import AzureCosmosDBNoSQLVectorSearch ``` +### Azure Database for PostgreSQL +>[Azure Database for PostgreSQL - Flexible Server](https://learn.microsoft.com/en-us/azure/postgresql/flexible-server/service-overview) is a relational database service based on the open-source Postgres database engine. It's a fully managed database-as-a-service that can handle mission-critical workloads with predictable performance, security, high availability, and dynamic scalability. + +See [set up instructions](https://learn.microsoft.com/en-us/azure/postgresql/flexible-server/quickstart-create-server-portal) for Azure Database for PostgreSQL. 
+ +See a [usage example](/docs/integrations/memory/postgres_chat_message_history/). Simply use the [connection string](https://learn.microsoft.com/en-us/azure/postgresql/flexible-server/connect-python?tabs=cmd%2Cpassword#add-authentication-code) from your Azure Portal. + +Since Azure Database for PostgreSQL is open-source Postgres, you can use the [LangChain's Postgres support](/docs/integrations/vectorstores/pgvector/) to connect to Azure Database for PostgreSQL. + ## Retrievers ### Azure AI Search @@ -347,6 +357,17 @@ See a [usage example](/docs/integrations/retrievers/azure_ai_search). from langchain.retrievers import AzureAISearchRetriever ``` +## Vector Store +### Azure Database for PostgreSQL +>[Azure Database for PostgreSQL - Flexible Server](https://learn.microsoft.com/en-us/azure/postgresql/flexible-server/service-overview) is a relational database service based on the open-source Postgres database engine. It's a fully managed database-as-a-service that can handle mission-critical workloads with predictable performance, security, high availability, and dynamic scalability. + +See [set up instructions](https://learn.microsoft.com/en-us/azure/postgresql/flexible-server/quickstart-create-server-portal) for Azure Database for PostgreSQL. + +You need to [enable pgvector extension](https://learn.microsoft.com/en-us/azure/postgresql/flexible-server/how-to-use-pgvector) in your database to use Postgres as a vector store. Once you have the extension enabled, you can use the [PGVector in LangChain](/docs/integrations/vectorstores/pgvector/) to connect to Azure Database for PostgreSQL. + +See a [usage example](/docs/integrations/vectorstores/pgvector/). Simply use the [connection string](https://learn.microsoft.com/en-us/azure/postgresql/flexible-server/connect-python?tabs=cmd%2Cpassword#add-authentication-code) from your Azure Portal. + + ## Tools ### Azure Container Apps dynamic sessions @@ -496,4 +517,3 @@ See [usage examples](https://python.langchain.com/v0.1/docs/guides/productioniza ```python from langchain_experimental.data_anonymizer import PresidioAnonymizer, PresidioReversibleAnonymizer ``` - From 12e490ea562f80928a10686f9b19e453a80eacbe Mon Sep 17 00:00:00 2001 From: Jabir <162261546+ja6ir@users.noreply.github.com> Date: Tue, 20 Aug 2024 18:23:30 +0530 Subject: [PATCH 23/80] Update azuresearch.py (#25577) This will allow complextype metadata to be returned. the current implementation throws error when dealing with nested metadata Thank you for contributing to LangChain! - [x] **PR title**: "package: description" - Where "package" is whichever of langchain, community, core, experimental, etc. is being modified. Use "docs: ..." for purely docs changes, "templates: ..." for template changes, "infra: ..." for CI changes. - Example: "community: add foobar LLM" - [ ] **PR message**: ***Delete this entire checklist*** and replace with - **Description:** a description of the change - **Issue:** the issue # it fixes, if applicable - **Dependencies:** any dependencies required for this change - **Twitter handle:** if your PR gets announced, and you'd like a mention, we'll gladly shout you out! - [ ] **Add tests and docs**: If you're adding a new integration, please include 1. a test for the integration, preferably unit tests that do not rely on network access, 2. an example notebook showing its use. It lives in `docs/docs/integrations` directory. - [ ] **Lint and test**: Run `make format`, `make lint` and `make test` from the root of the package(s) you've modified. 
See contribution guidelines for more: https://python.langchain.com/docs/contributing/ Additional guidelines: - Make sure optional dependencies are imported within a function. - Please do not add dependencies to pyproject.toml files (even optional ones) unless they are required for unit tests. - Most PRs should not touch more than one package. - Changes should be backwards compatible. - If you are adding something to community, do not re-import it in langchain. If no one reviews your PR within a few days, please @-mention one of baskaryan, efriis, eyurtsev, ccurme, vbarda, hwchase17. --------- Co-authored-by: Chester Curme --- .../langchain_community/vectorstores/azuresearch.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/libs/community/langchain_community/vectorstores/azuresearch.py b/libs/community/langchain_community/vectorstores/azuresearch.py index 5d7a6fc8edc8a..9154f4493396d 100644 --- a/libs/community/langchain_community/vectorstores/azuresearch.py +++ b/libs/community/langchain_community/vectorstores/azuresearch.py @@ -1735,7 +1735,11 @@ def _reorder_results_with_maximal_marginal_relevance( def _result_to_document(result: Dict) -> Document: return Document( page_content=result.pop(FIELDS_CONTENT), - metadata=json.loads(result[FIELDS_METADATA]) + metadata=( + result[FIELDS_METADATA] + if isinstance(result[FIELDS_METADATA], dict) + else json.loads(result[FIELDS_METADATA]) + ) if FIELDS_METADATA in result else { key: value for key, value in result.items() if key != FIELDS_CONTENT_VECTOR From 8e3e532e7dd62f4cc61753baa5954692b6cc0bb7 Mon Sep 17 00:00:00 2001 From: Bob Merkus Date: Tue, 20 Aug 2024 15:20:59 +0200 Subject: [PATCH 24/80] docs: ollama doc update (toolcalling, install, notebook examples) (#25549) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The new `langchain-ollama` package seems pretty well implemented, but I noticed the docs were still outdated so I decided to fix em up a bit. - Llama3.1 was release on 23rd of July; https://ai.meta.com/blog/meta-llama-3-1/ - Ollama supports tool calling since 25th of July; https://ollama.com/blog/tool-support - LangChain Ollama partner package was released 1st of august; https://pypi.org/project/langchain-ollama/ **Problem**: Docs note langchain-community instead of langchain-ollama **Solution**: Update docs to https://python.langchain.com/v0.2/docs/integrations/chat/ollama/ **Problem**: OllamaFunctions is deprecated, as noted on [Integrations](https://python.langchain.com/v0.2/docs/integrations/chat/ollama_functions/): This was an experimental wrapper that attempts to bolt-on tool calling support to models that do not natively support it. The [primary Ollama integration](https://python.langchain.com/v0.2/docs/integrations/chat/ollama/) now supports tool calling, and should be used instead. **Solution**: Delete old notebook from repo, update the existing one with @tool decorator + pydantic examples to the notebook **Problem**: Llama3.1 was released while llama3-groq-tool-call fine-tune Is noted in notebooks. 
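The guarded metadata handling at the end of the diff above can be shown in isolation. A simplified sketch of the behavior, not the vector store's actual helper, with plain stand-ins for the module's `FIELDS_CONTENT` and `FIELDS_METADATA` constants:

```python
import json
from typing import Any, Dict

from langchain_core.documents import Document

FIELDS_CONTENT = "content"    # stand-in for the module-level constant
FIELDS_METADATA = "metadata"  # stand-in for the module-level constant


def result_to_document(result: Dict[str, Any]) -> Document:
    # Nested/complex metadata may already arrive as a dict; only parse JSON strings.
    raw = result.get(FIELDS_METADATA, {})
    metadata = raw if isinstance(raw, dict) else json.loads(raw)
    return Document(page_content=result[FIELDS_CONTENT], metadata=metadata)


# Both result shapes are handled without raising:
result_to_document({"content": "doc text", "metadata": {"source": {"page": 1}}})
result_to_document({"content": "doc text", "metadata": json.dumps({"source": "kb"})})
```

Previously only the JSON-string branch existed, so a result whose metadata had already been deserialized into a nested dict raised a `TypeError` from `json.loads`.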
**Solution**: update docs + notebooks to llama3.1 (which has improved tool calling support) **Problem**: Install instructions are incomplete, there is no information to download a model and/or run the Ollama server **Solution**: Add simple instructions to start the ollama service and pull model (for toolcalling) --------- Co-authored-by: Chester Curme --- docs/docs/integrations/chat/ollama.ipynb | 70 ++-- .../integrations/chat/ollama_functions.ipynb | 322 ------------------ docs/docs/integrations/llms/ollama.ipynb | 12 +- docs/docs/integrations/providers/ollama.mdx | 45 ++- docs/vercel.json | 4 + 5 files changed, 72 insertions(+), 381 deletions(-) delete mode 100644 docs/docs/integrations/chat/ollama_functions.ipynb diff --git a/docs/docs/integrations/chat/ollama.ipynb b/docs/docs/integrations/chat/ollama.ipynb index f59474094a716..427590d01ed3d 100644 --- a/docs/docs/integrations/chat/ollama.ipynb +++ b/docs/docs/integrations/chat/ollama.ipynb @@ -110,7 +110,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 9, "id": "cb09c344-1836-4e0c-acf8-11d13ac1dbae", "metadata": {}, "outputs": [], @@ -118,7 +118,7 @@ "from langchain_ollama import ChatOllama\n", "\n", "llm = ChatOllama(\n", - " model=\"llama3\",\n", + " model=\"llama3.1\",\n", " temperature=0,\n", " # other params...\n", ")" @@ -134,7 +134,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 10, "id": "62e0dbc3", "metadata": { "tags": [] @@ -143,10 +143,10 @@ { "data": { "text/plain": [ - "AIMessage(content='Je adore le programmation.\\n\\n(Note: \"programmation\" is not commonly used in French, but I translated it as \"le programmation\" to maintain the same grammatical structure and meaning as the original English sentence.)', response_metadata={'model': 'llama3', 'created_at': '2024-07-22T17:43:54.731273Z', 'message': {'role': 'assistant', 'content': ''}, 'done_reason': 'stop', 'done': True, 'total_duration': 11094839375, 'load_duration': 10121854667, 'prompt_eval_count': 36, 'prompt_eval_duration': 146569000, 'eval_count': 46, 'eval_duration': 816593000}, id='run-befccbdc-e1f9-42a9-85cf-e69b926d6b8b-0', usage_metadata={'input_tokens': 36, 'output_tokens': 46, 'total_tokens': 82})" + "AIMessage(content='The translation of \"I love programming\" from English to French is:\\n\\n\"J\\'adore programmer.\"', response_metadata={'model': 'llama3.1', 'created_at': '2024-08-19T16:05:32.81965Z', 'message': {'role': 'assistant', 'content': ''}, 'done_reason': 'stop', 'done': True, 'total_duration': 2167842917, 'load_duration': 54222584, 'prompt_eval_count': 35, 'prompt_eval_duration': 893007000, 'eval_count': 22, 'eval_duration': 1218962000}, id='run-0863daa2-43bf-4a43-86cc-611b23eae466-0', usage_metadata={'input_tokens': 35, 'output_tokens': 22, 'total_tokens': 57})" ] }, - "execution_count": 4, + "execution_count": 10, "metadata": {}, "output_type": "execute_result" } @@ -167,7 +167,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 11, "id": "d86145b3-bfef-46e8-b227-4dda5c9c2705", "metadata": {}, "outputs": [ @@ -175,9 +175,9 @@ "name": "stdout", "output_type": "stream", "text": [ - "Je adore le programmation.\n", + "The translation of \"I love programming\" from English to French is:\n", "\n", - "(Note: \"programmation\" is not commonly used in French, but I translated it as \"le programmation\" to maintain the same grammatical structure and meaning as the original English sentence.)\n" + "\"J'adore programmer.\"\n" ] } ], @@ -197,17 +197,17 @@ }, { "cell_type": 
"code", - "execution_count": 9, + "execution_count": 12, "id": "e197d1d7-a070-4c96-9f8a-a0e86d046e0b", "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "AIMessage(content='Ich liebe Programmieren!\\n\\n(Note: \"Ich liebe\" means \"I love\", \"Programmieren\" is the verb for \"programming\")', response_metadata={'model': 'llama3', 'created_at': '2024-07-04T04:22:33.864132Z', 'message': {'role': 'assistant', 'content': ''}, 'done_reason': 'stop', 'done': True, 'total_duration': 1310800083, 'load_duration': 1782000, 'prompt_eval_count': 16, 'prompt_eval_duration': 250199000, 'eval_count': 29, 'eval_duration': 1057192000}, id='run-cbadbe59-2de2-4ec0-a18a-b3220226c3d2-0')" + "AIMessage(content='Das Programmieren ist mir ein Leidenschaft! (That\\'s \"Programming is my passion!\" in German.) Would you like me to translate anything else?', response_metadata={'model': 'llama3.1', 'created_at': '2024-08-19T16:05:34.893548Z', 'message': {'role': 'assistant', 'content': ''}, 'done_reason': 'stop', 'done': True, 'total_duration': 2045997333, 'load_duration': 22584792, 'prompt_eval_count': 30, 'prompt_eval_duration': 213210000, 'eval_count': 32, 'eval_duration': 1808541000}, id='run-d18e1c6b-50e0-4b1d-b23a-973fa058edad-0', usage_metadata={'input_tokens': 30, 'output_tokens': 32, 'total_tokens': 62})" ] }, - "execution_count": 9, + "execution_count": 12, "metadata": {}, "output_type": "execute_result" } @@ -242,33 +242,32 @@ "source": [ "## Tool calling\n", "\n", - "We can use [tool calling](https://blog.langchain.dev/improving-core-tool-interfaces-and-docs-in-langchain/) with an LLM [that has been fine-tuned for tool use](https://ollama.com/library/llama3-groq-tool-use): \n", + "We can use [tool calling](https://blog.langchain.dev/improving-core-tool-interfaces-and-docs-in-langchain/) with an LLM [that has been fine-tuned for tool use](https://ollama.com/library/llama3.1): \n", "\n", "```\n", - "ollama pull llama3-groq-tool-use\n", + "ollama pull llama3.1\n", "```\n", "\n", - "We can just pass normal Python functions directly as tools." + "Details on creating custom tools are available in [this guide](/docs/how_to/custom_tools/). Below, we demonstrate how to create a tool using the `@tool` decorator on a normal python function." 
] }, { "cell_type": "code", - "execution_count": 10, - "id": "5250bceb-1029-41ff-b447-983518704d88", + "execution_count": 13, + "id": "f767015f", "metadata": {}, "outputs": [ { "data": { "text/plain": [ "[{'name': 'validate_user',\n", - " 'args': {'addresses': ['123 Fake St, Boston MA',\n", - " '234 Pretend Boulevard, Houston TX'],\n", - " 'user_id': 123},\n", - " 'id': 'fe2148d3-95fb-48e9-845a-4bfecc1f1f96',\n", + " 'args': {'addresses': '[\"123 Fake St, Boston, MA\", \"234 Pretend Boulevard, Houston, TX\"]',\n", + " 'user_id': '123'},\n", + " 'id': '40fe3de0-500c-4b91-9616-5932a929e640',\n", " 'type': 'tool_call'}]" ] }, - "execution_count": 10, + "execution_count": 13, "metadata": {}, "output_type": "execute_result" } @@ -276,22 +275,23 @@ "source": [ "from typing import List\n", "\n", + "from langchain_core.tools import tool\n", "from langchain_ollama import ChatOllama\n", - "from typing_extensions import TypedDict\n", "\n", "\n", - "def validate_user(user_id: int, addresses: List) -> bool:\n", + "@tool\n", + "def validate_user(user_id: int, addresses: List[str]) -> bool:\n", " \"\"\"Validate user using historical addresses.\n", "\n", " Args:\n", - " user_id: (int) the user ID.\n", - " addresses: Previous addresses.\n", + " user_id (int): the user ID.\n", + " addresses (List[str]): Previous addresses as a list of strings.\n", " \"\"\"\n", " return True\n", "\n", "\n", "llm = ChatOllama(\n", - " model=\"llama3-groq-tool-use\",\n", + " model=\"llama3.1\",\n", " temperature=0,\n", ").bind_tools([validate_user])\n", "\n", @@ -303,18 +303,6 @@ "result.tool_calls" ] }, - { - "cell_type": "markdown", - "id": "2bb034ff-218f-4865-afea-3f5e57d3bdee", - "metadata": {}, - "source": [ - "We look at the LangSmith trace to see that the tool call was performed: \n", - "\n", - "https://smith.langchain.com/public/4169348a-d6be-45df-a7cf-032f6baa4697/r\n", - "\n", - "In particular, the trace shows how the tool schema was populated." - ] - }, { "cell_type": "markdown", "id": "4c5e0197", @@ -331,7 +319,7 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 15, "id": "36c9b1c2", "metadata": {}, "outputs": [ @@ -391,7 +379,7 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": 16, "id": "32b3ba7b", "metadata": {}, "outputs": [ @@ -467,7 +455,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.8" + "version": "3.12.4" } }, "nbformat": 4, diff --git a/docs/docs/integrations/chat/ollama_functions.ipynb b/docs/docs/integrations/chat/ollama_functions.ipynb deleted file mode 100644 index 96dc9f3f2315b..0000000000000 --- a/docs/docs/integrations/chat/ollama_functions.ipynb +++ /dev/null @@ -1,322 +0,0 @@ -{ - "cells": [ - { - "cell_type": "raw", - "metadata": {}, - "source": [ - "---\n", - "sidebar_label: Ollama Functions\n", - "sidebar_class_name: hidden\n", - "---" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# OllamaFunctions\n", - "\n", - ":::warning\n", - "\n", - "This was an experimental wrapper that attempts to bolt-on tool calling support to models that do not natively support it. 
The [primary Ollama integration](/docs/integrations/chat/ollama/) now supports tool calling, and should be used instead.\n", - "\n", - ":::\n", - "This notebook shows how to use an experimental wrapper around Ollama that gives it [tool calling capabilities](https://python.langchain.com/v0.2/docs/concepts/#functiontool-calling).\n", - "\n", - "Note that more powerful and capable models will perform better with complex schema and/or multiple functions. The examples below use llama3 and phi3 models.\n", - "For a complete list of supported models and model variants, see the [Ollama model library](https://ollama.ai/library).\n", - "\n", - "## Overview\n", - "\n", - "### Integration details\n", - "\n", - "| Class | Package | Local | Serializable | JS support | Package downloads | Package latest |\n", - "|:-----------------------------------------------------------------------------------------------------------------------------------:|:-------:|:-----:|:------------:|:----------:|:-----------------:|:--------------:|\n", - "| [OllamaFunctions](https://api.python.langchain.com/en/latest/llms/langchain_experimental.llms.ollama_function.OllamaFunctions.html) | [langchain-experimental](https://api.python.langchain.com/en/latest/openai_api_reference.html) | ✅ | ❌ | ❌ | ![PyPI - Downloads](https://img.shields.io/pypi/dm/langchain-experimental?style=flat-square&label=%20) | ![PyPI - Version](https://img.shields.io/pypi/v/langchain-experimental?style=flat-square&label=%20) |\n", - "\n", - "### Model features\n", - "\n", - "| [Tool calling](/docs/how_to/tool_calling/) | [Structured output](/docs/how_to/structured_output/) | JSON mode | Image input | Audio input | Video input | [Token-level streaming](/docs/how_to/chat_streaming/) | Native async | [Token usage](/docs/how_to/chat_token_usage_tracking/) | [Logprobs](/docs/how_to/logprobs/) |\n", - "| :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: |\n", - "| ✅ | ✅ | ✅ | ✅ | ❌ | ❌ | ❌ | ✅ | ❌ | ❌ |\n", - "\n", - "## Setup\n", - "\n", - "To access `OllamaFunctions` you will need to install `langchain-experimental` integration package.\n", - "Follow [these instructions](https://github.com/jmorganca/ollama) to set up and run a local Ollama instance as well as download and serve [supported models](https://ollama.com/library).\n", - "\n", - "### Credentials\n", - "\n", - "Credentials support is not present at this time.\n", - "\n", - "### Installation\n", - "\n", - "The `OllamaFunctions` class lives in the `langchain-experimental` package:\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "%pip install -qU langchain-experimental" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Instantiation\n", - "\n", - "`OllamaFunctions` takes the same init parameters as `ChatOllama`. \n", - "\n", - "In order to use tool calling, you must also specify `format=\"json\"`." 
- ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": { - "ExecuteTime": { - "end_time": "2024-06-23T15:20:21.818089Z", - "start_time": "2024-06-23T15:20:21.815759Z" - } - }, - "outputs": [], - "source": [ - "from langchain_experimental.llms.ollama_functions import OllamaFunctions\n", - "\n", - "llm = OllamaFunctions(model=\"phi3\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Invocation" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": { - "ExecuteTime": { - "end_time": "2024-06-23T15:20:46.794689Z", - "start_time": "2024-06-23T15:20:44.982632Z" - } - }, - "outputs": [ - { - "data": { - "text/plain": [ - "AIMessage(content=\"J'adore programmer.\", id='run-94815fcf-ae11-438a-ba3f-00819328b5cd-0')" - ] - }, - "execution_count": 3, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "messages = [\n", - " (\n", - " \"system\",\n", - " \"You are a helpful assistant that translates English to French. Translate the user sentence.\",\n", - " ),\n", - " (\"human\", \"I love programming.\"),\n", - "]\n", - "ai_msg = llm.invoke(messages)\n", - "ai_msg" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "\"J'adore programmer.\"" - ] - }, - "execution_count": 4, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "ai_msg.content" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Chaining\n", - "\n", - "We can [chain](https://python.langchain.com/v0.2/docs/how_to/sequence/) our model with a prompt template like so:" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "AIMessage(content='Programmieren ist sehr verrückt! Es freut mich, dass Sie auf Programmierung so positiv eingestellt sind.', id='run-ee99be5e-4d48-4ab6-b602-35415f0bdbde-0')" - ] - }, - "execution_count": 5, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "from langchain_core.prompts import ChatPromptTemplate\n", - "\n", - "prompt = ChatPromptTemplate.from_messages(\n", - " [\n", - " (\n", - " \"system\",\n", - " \"You are a helpful assistant that translates {input_language} to {output_language}.\",\n", - " ),\n", - " (\"human\", \"{input}\"),\n", - " ]\n", - ")\n", - "\n", - "chain = prompt | llm\n", - "chain.invoke(\n", - " {\n", - " \"input_language\": \"English\",\n", - " \"output_language\": \"German\",\n", - " \"input\": \"I love programming.\",\n", - " }\n", - ")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Tool Calling\n", - "\n", - "### OllamaFunctions.bind_tools()\n", - "\n", - "With `OllamaFunctions.bind_tools`, we can easily pass in Pydantic classes, dict schemas, LangChain tools, or even functions as tools to the model. Under the hood these are converted to a tool definition schemas, which looks like:" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [], - "source": [ - "from langchain_core.pydantic_v1 import BaseModel, Field\n", - "\n", - "\n", - "class GetWeather(BaseModel):\n", - " \"\"\"Get the current weather in a given location\"\"\"\n", - "\n", - " location: str = Field(..., description=\"The city and state, e.g. 
San Francisco, CA\")\n", - "\n", - "\n", - "llm_with_tools = llm.bind_tools([GetWeather])" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "AIMessage(content='', id='run-b9769435-ec6a-4cb8-8545-5a5035fc19bd-0', tool_calls=[{'name': 'GetWeather', 'args': {'location': 'San Francisco, CA'}, 'id': 'call_064c4e1cb27e4adb9e4e7ed60362ecc9'}])" - ] - }, - "execution_count": 8, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "ai_msg = llm_with_tools.invoke(\n", - " \"what is the weather like in San Francisco\",\n", - ")\n", - "ai_msg" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### AIMessage.tool_calls\n", - "\n", - "Notice that the AIMessage has a `tool_calls` attribute. This contains in a standardized `ToolCall` format that is model-provider agnostic." - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[{'name': 'GetWeather',\n", - " 'args': {'location': 'San Francisco, CA'},\n", - " 'id': 'call_064c4e1cb27e4adb9e4e7ed60362ecc9'}]" - ] - }, - "execution_count": 10, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "ai_msg.tool_calls" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "For more on binding tools and tool call outputs, head to the [tool calling](../../how_to/function_calling.ipynb) docs." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## API reference\n", - "\n", - "For detailed documentation of all ToolCallingLLM features and configurations head to the API reference: https://api.python.langchain.com/en/latest/llms/langchain_experimental.llms.ollama_functions.OllamaFunctions.html\n" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.10.12" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/docs/docs/integrations/llms/ollama.ipynb b/docs/docs/integrations/llms/ollama.ipynb index 1ab33f6c018dc..702bd912db55e 100644 --- a/docs/docs/integrations/llms/ollama.ipynb +++ b/docs/docs/integrations/llms/ollama.ipynb @@ -68,7 +68,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 2, "id": "035dea0f", "metadata": { "tags": [] @@ -77,10 +77,10 @@ { "data": { "text/plain": [ - "'A great start!\\n\\nLangChain is a type of AI model that uses language processing techniques to generate human-like text based on input prompts or chains of reasoning. In other words, it can have a conversation with humans, understanding the context and responding accordingly.\\n\\nHere\\'s a possible breakdown:\\n\\n* \"Lang\" likely refers to its focus on natural language processing (NLP) and linguistic analysis.\\n* \"Chain\" suggests that LangChain is designed to generate text in response to a series of connected ideas or prompts, rather than simply generating random text.\\n\\nSo, what do you think LangChain\\'s capabilities might be?'" + "\"Sounds like a plan!\\n\\nTo answer what LangChain is, let's break it down step by step.\\n\\n**Step 1: Understand the Context**\\nLangChain seems to be related to language or programming, possibly in an AI context. 
This makes me wonder if it's a framework, library, or tool for building models or interacting with them.\\n\\n**Step 2: Research Possible Definitions**\\nAfter some quick searching, I found that LangChain is actually a Python library for building and composing conversational AI models. It seems to provide a way to create modular and reusable components for chatbots, voice assistants, and other conversational interfaces.\\n\\n**Step 3: Explore Key Features and Use Cases**\\nLangChain likely offers features such as:\\n\\n* Easy composition of conversational flows\\n* Support for various input/output formats (e.g., text, audio)\\n* Integration with popular AI frameworks and libraries\\n\\nUse cases might include building chatbots for customer service, creating voice assistants for smart homes, or developing interactive stories.\\n\\n**Step 4: Confirm the Definition**\\nAfter this step-by-step analysis, I'm fairly confident that LangChain is a Python library for building conversational AI models. If you'd like to verify or provide more context, feel free to do so!\"" ] }, - "execution_count": 4, + "execution_count": 2, "metadata": {}, "output_type": "execute_result" } @@ -95,7 +95,7 @@ "\n", "prompt = ChatPromptTemplate.from_template(template)\n", "\n", - "model = OllamaLLM(model=\"llama3\")\n", + "model = OllamaLLM(model=\"llama3.1\")\n", "\n", "chain = prompt | model\n", "\n", @@ -177,7 +177,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 3, "id": "79aaf863", "metadata": {}, "outputs": [ @@ -218,7 +218,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.3" + "version": "3.12.4" }, "vscode": { "interpreter": { diff --git a/docs/docs/integrations/providers/ollama.mdx b/docs/docs/integrations/providers/ollama.mdx index 704b02ab15f23..6a05b5e2be606 100644 --- a/docs/docs/integrations/providers/ollama.mdx +++ b/docs/docs/integrations/providers/ollama.mdx @@ -1,7 +1,7 @@ # Ollama >[Ollama](https://ollama.com/) allows you to run open-source large language models, -> such as LLaMA2, locally. +> such as [Llama3.1](https://ai.meta.com/blog/meta-llama-3-1/), locally. > >`Ollama` bundles model weights, configuration, and data into a single package, defined by a Modelfile. >It optimizes setup and configuration details, including GPU usage. @@ -11,14 +11,36 @@ See [this guide](/docs/how_to/local_llms) for more details on how to use `Ollama` with LangChain. ## Installation and Setup - -Follow [these instructions](https://github.com/ollama/ollama?tab=readme-ov-file#ollama) +### Ollama installation +Follow [these instructions](https://github.com/ollama/ollama?tab=readme-ov-file#ollama) to set up and run a local Ollama instance. +Ollama will start as a background service automatically, if this is disabled, run: + +```bash +# export OLLAMA_HOST=127.0.0.1 # environment variable to set ollama host +# export OLLAMA_PORT=11434 # environment variable to set the ollama port +ollama serve +``` + +After starting ollama, run `ollama pull ` to download a model +from the [Ollama model library](https://ollama.ai/library). + +```bash +ollama pull llama3.1 +``` + +We're now ready to install the `langchain-ollama` partner package and run a model. 
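+
+Once the partner package below is installed, a minimal end-to-end check looks like
+this (a short sketch, assuming the local Ollama server is running and `llama3.1`
+has already been pulled; the prompt is arbitrary):
+
+```python
+from langchain_ollama.llms import OllamaLLM
+
+# Assumes `ollama serve` is running and `ollama pull llama3.1` has completed.
+llm = OllamaLLM(model="llama3.1")
+print(llm.invoke("Why is the sky blue?"))
+```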
+ +### Ollama LangChain partner package install +Install the integration package with: +```bash +pip install langchain-ollama +``` ## LLM ```python -from langchain_community.llms import Ollama +from langchain_ollama.llms import OllamaLLM ``` See the notebook example [here](/docs/integrations/llms/ollama). @@ -28,18 +50,17 @@ See the notebook example [here](/docs/integrations/llms/ollama). ### Chat Ollama ```python -from langchain_community.chat_models import ChatOllama +from langchain_ollama.chat_models import ChatOllama ``` See the notebook example [here](/docs/integrations/chat/ollama). -### Ollama functions - -```python -from langchain_experimental.llms.ollama_functions import OllamaFunctions -``` - -See the notebook example [here](/docs/integrations/chat/ollama_functions). +### Ollama tool calling +[Ollama tool calling](https://ollama.com/blog/tool-support) uses the +OpenAI compatible web server specification, and can be used with +the default `BaseChatModel.bind_tools()` methods +as described [here](/docs/how_to/tool_calling/). +Make sure to select an ollama model that supports [tool calling](https://ollama.com/search?&c=tools). ## Embedding models diff --git a/docs/vercel.json b/docs/vercel.json index 0648210037e4d..6afbee9e5e184 100644 --- a/docs/vercel.json +++ b/docs/vercel.json @@ -101,6 +101,10 @@ { "source": "/v0.2/docs/integrations/toolkits/xorbits/", "destination": "/v0.2/docs/integrations/tools#search" + }, + { + "source": "/v0.2/docs/integrations/chat/ollama_functions/", + "destination": "https://python.langchain.com/v0.1/docs/integrations/chat/ollama_functions/" } ] } From 8a71f1b41b83a4c8122ca53306fb09400ef5b537 Mon Sep 17 00:00:00 2001 From: Bagatur <22008038+baskaryan@users.noreply.github.com> Date: Tue, 20 Aug 2024 10:22:14 -0700 Subject: [PATCH 25/80] core[minor]: add langsmith document loader (#25493) needs tests --- .../document_loaders/langsmith.ipynb | 294 ++++++++++++++++++ .../document_loaders/__init__.py | 2 + .../document_loaders/langsmith.py | 128 ++++++++ .../document_loaders/test_langsmith.py | 58 ++++ 4 files changed, 482 insertions(+) create mode 100644 docs/docs/integrations/document_loaders/langsmith.ipynb create mode 100644 libs/core/langchain_core/document_loaders/langsmith.py create mode 100644 libs/core/tests/unit_tests/document_loaders/test_langsmith.py diff --git a/docs/docs/integrations/document_loaders/langsmith.ipynb b/docs/docs/integrations/document_loaders/langsmith.ipynb new file mode 100644 index 0000000000000..98413d15620fd --- /dev/null +++ b/docs/docs/integrations/document_loaders/langsmith.ipynb @@ -0,0 +1,294 @@ +{ + "cells": [ + { + "cell_type": "raw", + "metadata": {}, + "source": [ + "---\n", + "sidebar_label: LangSmith\n", + "---" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# LangSmithLoader\n", + "\n", + "This notebook provides a quick overview for getting started with the LangSmith [document loader](https://python.langchain.com/v0.2/docs/concepts/#document-loaders). 
For detailed documentation of all LangSmithLoader features and configurations head to the [API reference](https://api.python.langchain.com/en/latest/document_loaders/langchain_core.document_loaders.langsmith.LangSmithLoader.html).\n", + "\n", + "## Overview\n", + "### Integration details\n", + "\n", + "| Class | Package | Local | Serializable | JS support|\n", + "| :--- | :--- | :---: | :---: | :---: |\n", + "| [LangSmithLoader](https://api.python.langchain.com/en/latest/document_loaders/langchain_core.document_loaders.langsmith.LangSmithLoader.html) | [langchain-core](https://api.python.langchain.com/en/latest/core_api_reference.html) | ❌ | ❌ | ❌ | \n", + "\n", + "### Loader features\n", + "| Source | Lazy loading | Native async\n", + "| :---: | :---: | :---: | \n", + "| LangSmithLoader | ✅ | ❌ | \n", + "\n", + "## Setup\n", + "\n", + "To access the LangSmith document loader you'll need to install `langchain-core`, create a [LangSmith](https://langsmith.com) account and get an API key.\n", + "\n", + "### Credentials\n", + "\n", + "Sign up at https://langsmith.com and generate an API key. Once you've done this set the LANGSMITH_API_KEY environment variable:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import getpass\n", + "import os\n", + "\n", + "if not os.environ.get(\"LANGSMITH_API_KEY\"):\n", + " os.environ[\"LANGSMITH_API_KEY\"] = getpass.getpass(\"Enter your LangSmith API key: \")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "If you want to get automated best-in-class tracing, you can also turn on LangSmith tracing:" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "# os.environ[\"LANGSMITH_TRACING\"] = \"true\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Installation\n", + "\n", + "Install `langchain-core`:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "%pip install -qU langchain-core" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Clone example dataset\n", + "\n", + "For this example, we'll clone and load a public LangSmith dataset. Cloning creates a copy of this dataset on our personal LangSmith account. You can only load datasets that you have a personal copy of." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [], + "source": [ + "from langsmith import Client as LangSmithClient\n", + "\n", + "ls_client = LangSmithClient()\n", + "\n", + "dataset_name = \"LangSmith Few Shot Datasets Notebook\"\n", + "dataset_public_url = (\n", + " \"https://smith.langchain.com/public/55658626-124a-4223-af45-07fb774a6212/d\"\n", + ")\n", + "\n", + "ls_client.clone_public_dataset(dataset_public_url)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Initialization\n", + "\n", + "Now we can instantiate our document loader and load documents:" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [], + "source": [ + "from langchain_core.document_loaders import LangSmithLoader\n", + "\n", + "loader = LangSmithLoader(\n", + " dataset_name=dataset_name,\n", + " content_key=\"question\",\n", + " limit=50,\n", + " # format_content=...,\n", + " # ...\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Load" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Show me an example using Weaviate, but customizing the vectorStoreRetriever to return the top 10 k nearest neighbors. \n" + ] + } + ], + "source": [ + "docs = loader.load()\n", + "print(docs[0].page_content)" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{'question': 'Show me an example using Weaviate, but customizing the vectorStoreRetriever to return the top 10 k nearest neighbors. '}\n" + ] + } + ], + "source": [ + "print(docs[0].metadata[\"inputs\"])" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{'answer': 'To customize the Weaviate client and return the top 10 k nearest neighbors, you can utilize the `as_retriever` method with the appropriate parameters. 
Here\\'s how you can achieve this:\\n\\n```python\\n# Assuming you have imported the necessary modules and classes\\n\\n# Create the Weaviate client\\nclient = weaviate.Client(url=os.environ[\"WEAVIATE_URL\"], ...)\\n\\n# Initialize the Weaviate wrapper\\nweaviate = Weaviate(client, index_name, text_key)\\n\\n# Customize the client to return top 10 k nearest neighbors using as_retriever\\ncustom_retriever = weaviate.as_retriever(\\n search_type=\"similarity\",\\n search_kwargs={\\n \\'k\\': 10 # Customize the value of k as needed\\n }\\n)\\n\\n# Now you can use the custom_retriever to perform searches\\nresults = custom_retriever.search(query, ...)\\n```'}\n" + ] + } + ], + "source": [ + "print(docs[0].metadata[\"outputs\"])" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['dataset_id',\n", + " 'inputs',\n", + " 'outputs',\n", + " 'metadata',\n", + " 'id',\n", + " 'created_at',\n", + " 'modified_at',\n", + " 'runs',\n", + " 'source_run_id']" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "list(docs[0].metadata.keys())" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Lazy Load" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "10" + ] + }, + "execution_count": 20, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "page = []\n", + "for doc in loader.lazy_load():\n", + " page.append(doc)\n", + " if len(page) >= 10:\n", + " # do some paged operation, e.g.\n", + " # index.upsert(page)\n", + " # page = []\n", + " break\n", + "len(page)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## API reference\n", + "\n", + "For detailed documentation of all LangSmithLoader features and configurations head to the API reference: https://api.python.langchain.com/en/latest/document_loaders/langchain_core.document_loaders.langsmith.LangSmithLoader.html" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "poetry-venv-311", + "language": "python", + "name": "poetry-venv-311" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/libs/core/langchain_core/document_loaders/__init__.py b/libs/core/langchain_core/document_loaders/__init__.py index 05a48a9be01f0..e8b6fde5be89b 100644 --- a/libs/core/langchain_core/document_loaders/__init__.py +++ b/libs/core/langchain_core/document_loaders/__init__.py @@ -1,5 +1,6 @@ from langchain_core.document_loaders.base import BaseBlobParser, BaseLoader from langchain_core.document_loaders.blob_loaders import Blob, BlobLoader, PathLike +from langchain_core.document_loaders.langsmith import LangSmithLoader __all__ = [ "BaseBlobParser", @@ -7,4 +8,5 @@ "Blob", "BlobLoader", "PathLike", + "LangSmithLoader", ] diff --git a/libs/core/langchain_core/document_loaders/langsmith.py b/libs/core/langchain_core/document_loaders/langsmith.py new file mode 100644 index 0000000000000..232da98ccf7c6 --- /dev/null +++ b/libs/core/langchain_core/document_loaders/langsmith.py @@ -0,0 +1,128 @@ +import datetime +import json +import uuid +from typing import Any, Callable, Iterator, Optional, Sequence, Union + +from 
langsmith import Client as LangSmithClient + +from langchain_core.document_loaders.base import BaseLoader +from langchain_core.documents import Document + + +class LangSmithLoader(BaseLoader): + """Load LangSmith Dataset examples as Documents. + + Loads the example inputs as the Document page content and places the entire example + into the Document metadata. This allows you to easily create few-shot example + retrievers from the loaded documents. + + .. dropdown:: Lazy load + + .. code-block:: python + + from langchain_core.document_loaders import LangSmithLoader + + loader = LangSmithLoader(dataset_id="...", limit=100) + docs = [] + for doc in loader.lazy_load(): + docs.append(doc) + + .. code-block:: pycon + + # -> [Document("...", metadata={"inputs": {...}, "outputs": {...}, ...}), ...] + + .. versionadded:: 0.2.34 + """ # noqa: E501 + + def __init__( + self, + *, + dataset_id: Optional[Union[uuid.UUID, str]] = None, + dataset_name: Optional[str] = None, + example_ids: Optional[Sequence[Union[uuid.UUID, str]]] = None, + as_of: Optional[Union[datetime.datetime, str]] = None, + splits: Optional[Sequence[str]] = None, + inline_s3_urls: bool = True, + offset: int = 0, + limit: Optional[int] = None, + metadata: Optional[dict] = None, + filter: Optional[str] = None, + content_key: str = "", + format_content: Optional[Callable[..., str]] = None, + client: Optional[LangSmithClient] = None, + **client_kwargs: Any, + ) -> None: + """ + Args: + dataset_id: The ID of the dataset to filter by. Defaults to None. + dataset_name: The name of the dataset to filter by. Defaults to None. + content_key: The inputs key to set as Document page content. ``"."`` characters + are interpreted as nested keys. E.g. ``content_key="first.second"`` will + result in + ``Document(page_content=format_content(example.inputs["first"]["second"]))`` + format_content: Function for converting the content extracted from the example + inputs into a string. Defaults to JSON-encoding the contents. + example_ids: The IDs of the examples to filter by. Defaults to None. + as_of: The dataset version tag OR + timestamp to retrieve the examples as of. + Response examples will only be those that were present at the time + of the tagged (or timestamped) version. + splits: A list of dataset splits, which are + divisions of your dataset such as 'train', 'test', or 'validation'. + Returns examples only from the specified splits. + inline_s3_urls: Whether to inline S3 URLs. Defaults to True. + offset: The offset to start from. Defaults to 0. + limit: The maximum number of examples to return. + filter: A structured fileter string to apply to the examples. + client: LangSmith Client. If not provided will be initialized from below args. + client_kwargs: Keyword args to pass to LangSmith client init. Should only be + specified if ``client`` isn't. 
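+
+        Example:
+            A minimal constructor sketch (the dataset name below is illustrative;
+            any dataset you have access to will work):
+
+            .. code-block:: python
+
+                # "my-few-shot-dataset" is a placeholder dataset name.
+                loader = LangSmithLoader(
+                    dataset_name="my-few-shot-dataset",
+                    content_key="question",
+                    limit=50,
+                )
+                docs = loader.load()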
+ """ # noqa: E501 + if client and client_kwargs: + raise ValueError + self._client = client or LangSmithClient(**client_kwargs) + self.content_key = list(content_key.split(".")) if content_key else [] + self.format_content = format_content or _stringify + self.dataset_id = dataset_id + self.dataset_name = dataset_name + self.example_ids = example_ids + self.as_of = as_of + self.splits = splits + self.inline_s3_urls = inline_s3_urls + self.offset = offset + self.limit = limit + self.metadata = metadata + self.filter = filter + + def lazy_load(self) -> Iterator[Document]: + for example in self._client.list_examples( + dataset_id=self.dataset_id, + dataset_name=self.dataset_name, + example_ids=self.example_ids, + as_of=self.as_of, + splits=self.splits, + inline_s3_urls=self.inline_s3_urls, + offset=self.offset, + limit=self.limit, + metadata=self.metadata, + filter=self.filter, + ): + content: Any = example.inputs + for key in self.content_key: + content = content[key] + content_str = self.format_content(content) + metadata = example.dict() + # Stringify datetime and UUID types. + for k in ("dataset_id", "created_at", "modified_at", "source_run_id", "id"): + metadata[k] = str(metadata[k]) if metadata[k] else metadata[k] + yield Document(content_str, metadata=metadata) + + +def _stringify(x: Union[str, dict]) -> str: + if isinstance(x, str): + return x + else: + try: + return json.dumps(x, indent=2) + except Exception: + return str(x) diff --git a/libs/core/tests/unit_tests/document_loaders/test_langsmith.py b/libs/core/tests/unit_tests/document_loaders/test_langsmith.py new file mode 100644 index 0000000000000..e754ab2d37220 --- /dev/null +++ b/libs/core/tests/unit_tests/document_loaders/test_langsmith.py @@ -0,0 +1,58 @@ +import datetime +import uuid +from unittest.mock import MagicMock, patch + +from langsmith.schemas import Example + +from langchain_core.document_loaders import LangSmithLoader +from langchain_core.documents import Document + + +def test_init() -> None: + LangSmithLoader(api_key="secret") + + +EXAMPLES = [ + Example( + inputs={"first": {"second": "foo"}}, + outputs={"res": "a"}, + dataset_id=uuid.uuid4(), + id=uuid.uuid4(), + created_at=datetime.datetime.now(), + ), + Example( + inputs={"first": {"second": "bar"}}, + outputs={"res": "b"}, + dataset_id=uuid.uuid4(), + id=uuid.uuid4(), + created_at=datetime.datetime.now(), + ), + Example( + inputs={"first": {"second": "baz"}}, + outputs={"res": "c"}, + dataset_id=uuid.uuid4(), + id=uuid.uuid4(), + created_at=datetime.datetime.now(), + ), +] + + +@patch("langsmith.Client.list_examples", MagicMock(return_value=iter(EXAMPLES))) +def test_lazy_load() -> None: + loader = LangSmithLoader( + api_key="dummy", + dataset_id="mock", + content_key="first.second", + format_content=(lambda x: x.upper()), + ) + expected = [] + for example in EXAMPLES: + metadata = { + k: v if not v or isinstance(v, dict) else str(v) + for k, v in example.dict().items() + } + expected.append( + Document(example.inputs["first"]["second"].upper(), metadata=metadata) + ) + actual = [doc for doc in loader.lazy_load()] + assert expected == actual From d40bdd6257ff49beab0d10998fd94513ee80bfd7 Mon Sep 17 00:00:00 2001 From: Isaac Francisco <78627776+isahers1@users.noreply.github.com> Date: Tue, 20 Aug 2024 10:54:42 -0700 Subject: [PATCH 26/80] docs: more indexing of document loaders (#25500) Co-authored-by: Bagatur Co-authored-by: Bagatur <22008038+baskaryan@users.noreply.github.com> --- .../integrations/document_loaders/index.mdx | 24 ++ 
.../document_loaders/notion.ipynb | 177 ++++++++++-- .../document_loaders/notiondb.ipynb | 161 ----------- .../integrations/document_loaders/xml.ipynb | 163 ++++++++++- docs/docs/integrations/providers/notion.mdx | 11 +- docs/src/theme/FeatureTables.js | 266 ++++++++++++++++++ docs/vercel.json | 4 + 7 files changed, 610 insertions(+), 196 deletions(-) delete mode 100644 docs/docs/integrations/document_loaders/notiondb.ipynb diff --git a/docs/docs/integrations/document_loaders/index.mdx b/docs/docs/integrations/document_loaders/index.mdx index 21076def823af..6dee9374e97d0 100644 --- a/docs/docs/integrations/document_loaders/index.mdx +++ b/docs/docs/integrations/document_loaders/index.mdx @@ -33,6 +33,30 @@ The below document loaders allow you to load PDF documents. +## Cloud Providers + +The below document loaders allow you to load documents from your favorite cloud providers. + + + +## Social Platforms + +The below document loaders allow you to load documents from differnt social media platforms. + + + +## Messaging Services + +The below document loaders allow you to load data from different messaging platforms. + + + +## Productivity tools + +The below document loaders allow you to load data from commonly used productivity tools. + + + ## Common File Types The below document loaders allow you to load data from common data formats. diff --git a/docs/docs/integrations/document_loaders/notion.ipynb b/docs/docs/integrations/document_loaders/notion.ipynb index 1c81e3765c33a..dc2c5fff53ee4 100644 --- a/docs/docs/integrations/document_loaders/notion.ipynb +++ b/docs/docs/integrations/document_loaders/notion.ipynb @@ -1,59 +1,204 @@ { "cells": [ { + "attachments": {}, "cell_type": "markdown", "id": "1dc7df1d", "metadata": {}, "source": [ - "# Notion DB 1/2\n", + "# Notion DB 2/2\n", "\n", ">[Notion](https://www.notion.so/) is a collaboration platform with modified Markdown support that integrates kanban boards, tasks, wikis and databases. It is an all-in-one workspace for notetaking, knowledge and data management, and project and task management.\n", "\n", - "This notebook covers how to load documents from a Notion database dump.\n", + "`NotionDBLoader` is a Python class for loading content from a `Notion` database. It retrieves pages from the database, reads their content, and returns a list of Document objects. `NotionDirectoryLoader` is used for loading data from a Notion database dump.\n", "\n", - "In order to get this notion dump, follow these instructions:\n", + "## Requirements\n", "\n", - "## 🧑 Instructions for ingesting your own dataset\n", + "- A `Notion` Database\n", + "- Notion Integration Token\n", "\n", - "Export your dataset from Notion. You can do this by clicking on the three dots in the upper right hand corner and then clicking `Export`.\n", + "## Setup\n", "\n", - "When exporting, make sure to select the `Markdown & CSV` format option.\n", + "### 1. Create a Notion Table Database\n", + "Create a new table database in Notion. You can add any column to the database and they will be treated as metadata. For example you can add the following columns:\n", "\n", - "This will produce a `.zip` file in your Downloads folder. 
Move the `.zip` file into this repository.\n", + "- Title: set Title as the default property.\n", + "- Categories: A Multi-select property to store categories associated with the page.\n", + "- Keywords: A Multi-select property to store keywords associated with the page.\n", "\n", - "Run the following command to unzip the zip file (replace the `Export...` with your own file name as needed).\n", + "Add your content to the body of each page in the database. The NotionDBLoader will extract the content and metadata from these pages.\n", "\n", - "```shell\n", - "unzip Export-d3adfe0f-3131-4bf3-8987-a52017fc1bae.zip -d Notion_DB\n", - "```\n", + "## 2. Create a Notion Integration\n", + "To create a Notion Integration, follow these steps:\n", + "\n", + "1. Visit the [Notion Developers](https://www.notion.com/my-integrations) page and log in with your Notion account.\n", + "2. Click on the \"+ New integration\" button.\n", + "3. Give your integration a name and choose the workspace where your database is located.\n", + "4. Select the require capabilities, this extension only need the Read content capability\n", + "5. Click the \"Submit\" button to create the integration.\n", + "Once the integration is created, you'll be provided with an `Integration Token (API key)`. Copy this token and keep it safe, as you'll need it to use the NotionDBLoader.\n", + "\n", + "### 3. Connect the Integration to the Database\n", + "To connect your integration to the database, follow these steps:\n", + "\n", + "1. Open your database in Notion.\n", + "2. Click on the three-dot menu icon in the top right corner of the database view.\n", + "3. Click on the \"+ New integration\" button.\n", + "4. Find your integration, you may need to start typing its name in the search box.\n", + "5. Click on the \"Connect\" button to connect the integration to the database.\n", + "\n", + "\n", + "### 4. Get the Database ID\n", + "To get the database ID, follow these steps:\n", + "\n", + "1. Open your database in Notion.\n", + "2. Click on the three-dot menu icon in the top right corner of the database view.\n", + "3. Select \"Copy link\" from the menu to copy the database URL to your clipboard.\n", + "4. The database ID is the long string of alphanumeric characters found in the URL. It typically looks like this: https://www.notion.so/username/8935f9d140a04f95a872520c4f123456?v=.... In this example, the database ID is 8935f9d140a04f95a872520c4f123456.\n", + "\n", + "With the database properly set up and the integration token and database ID in hand, you can now use the NotionDBLoader code to load content and metadata from your Notion database.\n", + "\n", + "### 5. Installation\n", + "\n", + "Instaall the `langchain-community` integration package.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "412b38dc", + "metadata": {}, + "outputs": [], + "source": [ + "%pip install -qU langchain-community" + ] + }, + { + "cell_type": "markdown", + "id": "cced2931", + "metadata": {}, + "source": [ "\n", - "Run the following command to ingest the data." + "## Notion Database Loader\n", + "NotionDBLoader is part of the langchain package's document loaders. 
You can use it as follows:" ] }, { "cell_type": "code", - "execution_count": 1, + "execution_count": 12, + "id": "6c3a314c", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "········\n", + "········\n" + ] + } + ], + "source": [ + "from getpass import getpass\n", + "\n", + "NOTION_TOKEN = getpass()\n", + "DATABASE_ID = getpass()" + ] + }, + { + "cell_type": "code", + "execution_count": 13, "id": "007c5cbf", "metadata": {}, "outputs": [], "source": [ - "from langchain_community.document_loaders import NotionDirectoryLoader" + "from langchain_community.document_loaders import NotionDBLoader" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 14, "id": "a1caec59", "metadata": {}, "outputs": [], "source": [ + "loader = NotionDBLoader(\n", + " integration_token=NOTION_TOKEN,\n", + " database_id=DATABASE_ID,\n", + " request_timeout_sec=30, # optional, defaults to 10\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "id": "b1c30ff7", + "metadata": {}, + "outputs": [], + "source": [ + "docs = loader.load()" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "id": "4f5789a2", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n" + ] + } + ], + "source": [ + "print(docs)" + ] + }, + { + "cell_type": "markdown", + "id": "2b87ab5c", + "metadata": {}, + "source": [ + "## Notion Directory Loader\n", + "\n", + "### Setup\n", + "\n", + "Export your dataset from Notion. You can do this by clicking on the three dots in the upper right hand corner and then clicking `Export`.\n", + "\n", + "When exporting, make sure to select the `Markdown & CSV` format option.\n", + "\n", + "This will produce a `.zip` file in your Downloads folder. Move the `.zip` file into this repository.\n", + "\n", + "Run the following command to unzip the zip file (replace the `Export...` with your own file name as needed).\n", + "\n", + "```shell\n", + "unzip Export-d3adfe0f-3131-4bf3-8987-a52017fc1bae.zip -d Notion_DB\n", + "```\n", + "\n", + "### Usage\n", + "\n", + "Run the following command to ingest the data you just downloaded." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9debffdd", + "metadata": {}, + "outputs": [], + "source": [ + "from langchain_community.document_loaders import NotionDirectoryLoader\n", + "\n", "loader = NotionDirectoryLoader(\"Notion_DB\")" ] }, { "cell_type": "code", "execution_count": null, - "id": "b1c30ff7", + "id": "81008087", "metadata": {}, "outputs": [], "source": [ diff --git a/docs/docs/integrations/document_loaders/notiondb.ipynb b/docs/docs/integrations/document_loaders/notiondb.ipynb deleted file mode 100644 index d612728f6adfc..0000000000000 --- a/docs/docs/integrations/document_loaders/notiondb.ipynb +++ /dev/null @@ -1,161 +0,0 @@ -{ - "cells": [ - { - "attachments": {}, - "cell_type": "markdown", - "id": "1dc7df1d", - "metadata": {}, - "source": [ - "# Notion DB 2/2\n", - "\n", - ">[Notion](https://www.notion.so/) is a collaboration platform with modified Markdown support that integrates kanban boards, tasks, wikis and databases. It is an all-in-one workspace for notetaking, knowledge and data management, and project and task management.\n", - "\n", - "`NotionDBLoader` is a Python class for loading content from a `Notion` database. 
It retrieves pages from the database, reads their content, and returns a list of Document objects.\n", - "\n", - "## Requirements\n", - "\n", - "- A `Notion` Database\n", - "- Notion Integration Token\n", - "\n", - "## Setup\n", - "\n", - "### 1. Create a Notion Table Database\n", - "Create a new table database in Notion. You can add any column to the database and they will be treated as metadata. For example you can add the following columns:\n", - "\n", - "- Title: set Title as the default property.\n", - "- Categories: A Multi-select property to store categories associated with the page.\n", - "- Keywords: A Multi-select property to store keywords associated with the page.\n", - "\n", - "Add your content to the body of each page in the database. The NotionDBLoader will extract the content and metadata from these pages.\n", - "\n", - "## 2. Create a Notion Integration\n", - "To create a Notion Integration, follow these steps:\n", - "\n", - "1. Visit the [Notion Developers](https://www.notion.com/my-integrations) page and log in with your Notion account.\n", - "2. Click on the \"+ New integration\" button.\n", - "3. Give your integration a name and choose the workspace where your database is located.\n", - "4. Select the require capabilities, this extension only need the Read content capability\n", - "5. Click the \"Submit\" button to create the integration.\n", - "Once the integration is created, you'll be provided with an `Integration Token (API key)`. Copy this token and keep it safe, as you'll need it to use the NotionDBLoader.\n", - "\n", - "### 3. Connect the Integration to the Database\n", - "To connect your integration to the database, follow these steps:\n", - "\n", - "1. Open your database in Notion.\n", - "2. Click on the three-dot menu icon in the top right corner of the database view.\n", - "3. Click on the \"+ New integration\" button.\n", - "4. Find your integration, you may need to start typing its name in the search box.\n", - "5. Click on the \"Connect\" button to connect the integration to the database.\n", - "\n", - "\n", - "### 4. Get the Database ID\n", - "To get the database ID, follow these steps:\n", - "\n", - "1. Open your database in Notion.\n", - "2. Click on the three-dot menu icon in the top right corner of the database view.\n", - "3. Select \"Copy link\" from the menu to copy the database URL to your clipboard.\n", - "4. The database ID is the long string of alphanumeric characters found in the URL. It typically looks like this: https://www.notion.so/username/8935f9d140a04f95a872520c4f123456?v=.... In this example, the database ID is 8935f9d140a04f95a872520c4f123456.\n", - "\n", - "With the database properly set up and the integration token and database ID in hand, you can now use the NotionDBLoader code to load content and metadata from your Notion database.\n", - "\n", - "## Usage\n", - "NotionDBLoader is part of the langchain package's document loaders. 
You can use it as follows:" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "id": "6c3a314c", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "········\n", - "········\n" - ] - } - ], - "source": [ - "from getpass import getpass\n", - "\n", - "NOTION_TOKEN = getpass()\n", - "DATABASE_ID = getpass()" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "id": "007c5cbf", - "metadata": {}, - "outputs": [], - "source": [ - "from langchain_community.document_loaders import NotionDBLoader" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "id": "a1caec59", - "metadata": {}, - "outputs": [], - "source": [ - "loader = NotionDBLoader(\n", - " integration_token=NOTION_TOKEN,\n", - " database_id=DATABASE_ID,\n", - " request_timeout_sec=30, # optional, defaults to 10\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "id": "b1c30ff7", - "metadata": {}, - "outputs": [], - "source": [ - "docs = loader.load()" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "id": "4f5789a2", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n" - ] - } - ], - "source": [ - "print(docs)" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.10.6" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/docs/docs/integrations/document_loaders/xml.ipynb b/docs/docs/integrations/document_loaders/xml.ipynb index 55f2f14c6406a..0e28b8e0a3bde 100644 --- a/docs/docs/integrations/document_loaders/xml.ipynb +++ b/docs/docs/integrations/document_loaders/xml.ipynb @@ -2,18 +2,88 @@ "cells": [ { "cell_type": "markdown", - "id": "22a849cc", + "id": "72ccbe2b", "metadata": {}, "source": [ - "# XML\n", + "# UnstructuredXMLLoader\n", "\n", - "The `UnstructuredXMLLoader` is used to load `XML` files. The loader works with `.xml` files. The page content will be the text extracted from the XML tags." + "This notebook provides a quick overview for getting started with UnstructuredXMLLoader [document loader](https://python.langchain.com/v0.2/docs/concepts/#document-loaders). The `UnstructuredXMLLoader` is used to load `XML` files. The loader works with `.xml` files. 
The page content will be the text extracted from the XML tags.\n", + "\n", + "\n", + "## Overview\n", + "### Integration details\n", + "\n", + "\n", + "| Class | Package | Local | Serializable | [JS support](https://js.langchain.com/v0.2/docs/integrations/document_loaders/file_loaders/unstructured/)|\n", + "| :--- | :--- | :---: | :---: | :---: |\n", + "| [UnstructuredXMLLoader](https://api.python.langchain.com/en/latest/document_loaders/langchain_community.document_loaders.xml.UnstructuredXMLLoader.html) | [langchain_community](https://api.python.langchain.com/en/latest/community_api_reference.html) | ✅ | ❌ | ✅ | \n", + "### Loader features\n", + "| Source | Document Lazy Loading | Native Async Support\n", + "| :---: | :---: | :---: | \n", + "| UnstructuredXMLLoader | ✅ | ❌ | \n", + "\n", + "## Setup\n", + "\n", + "To access UnstructuredXMLLoader document loader you'll need to install the `langchain-community` integration package.\n", + "\n", + "### Credentials\n", + "\n", + "No credentials are needed to use the UnstructuredXMLLoader" + ] + }, + { + "cell_type": "markdown", + "id": "fc4ba987", + "metadata": {}, + "source": [ + "If you want to get automated best in-class tracing of your model calls you can also set your [LangSmith](https://docs.smith.langchain.com/) API key by uncommenting below:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9fa4d5e5", + "metadata": {}, + "outputs": [], + "source": [ + "# os.environ[\"LANGSMITH_API_KEY\"] = getpass.getpass(\"Enter your LangSmith API key: \")\n", + "# os.environ[\"LANGSMITH_TRACING\"] = \"true\"" + ] + }, + { + "cell_type": "markdown", + "id": "38e53f22", + "metadata": {}, + "source": [ + "### Installation\n", + "\n", + "Install **langchain_community**." ] }, { "cell_type": "code", - "execution_count": 1, - "id": "e6616e3a", + "execution_count": null, + "id": "fcd320ec", + "metadata": {}, + "outputs": [], + "source": [ + "%pip install -qU langchain_community" + ] + }, + { + "cell_type": "markdown", + "id": "a102f199", + "metadata": {}, + "source": [ + "## Initialization\n", + "\n", + "Now we can instantiate our model object and load documents:" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "2d198582", "metadata": {}, "outputs": [], "source": [ @@ -21,18 +91,91 @@ "\n", "loader = UnstructuredXMLLoader(\n", " \"./example_data/factbook.xml\",\n", - ")\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "9bbb463c", + "metadata": {}, + "source": [ + "## Load" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "cd875e75", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "Document(metadata={'source': './example_data/factbook.xml'}, page_content='United States\\n\\nWashington, DC\\n\\nJoe Biden\\n\\nBaseball\\n\\nCanada\\n\\nOttawa\\n\\nJustin Trudeau\\n\\nHockey\\n\\nFrance\\n\\nParis\\n\\nEmmanuel Macron\\n\\nSoccer\\n\\nTrinidad & Tobado\\n\\nPort of Spain\\n\\nKeith Rowley\\n\\nTrack & Field')" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ "docs = loader.load()\n", "docs[0]" ] }, { "cell_type": "code", - "execution_count": null, - "id": "a54342bb", + "execution_count": 4, + "id": "79b52cc0", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{'source': './example_data/factbook.xml'}\n" + ] + } + ], + "source": [ + "print(docs[0].metadata)" + ] + }, + { + "cell_type": "markdown", + "id": "557608e5", + "metadata": {}, + "source": [ + "## Lazy Load" 
+ ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "e3b9e75c", "metadata": {}, "outputs": [], - "source": [] + "source": [ + "page = []\n", + "for doc in loader.lazy_load():\n", + " page.append(doc)\n", + " if len(page) >= 10:\n", + " # do some paged operation, e.g.\n", + " # index.upsert(page)\n", + "\n", + " page = []" + ] + }, + { + "cell_type": "markdown", + "id": "712aa98f", + "metadata": {}, + "source": [ + "## API reference\n", + "\n", + "For detailed documentation of all __ModuleName__Loader features and configurations head to the API reference: https://api.python.langchain.com/en/latest/document_loaders/langchain_community.document_loaders.xml.UnstructuredXMLLoader.html" + ] } ], "metadata": { @@ -51,7 +194,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.15" + "version": "3.11.9" } }, "nbformat": 4, diff --git a/docs/docs/integrations/providers/notion.mdx b/docs/docs/integrations/providers/notion.mdx index 7f5136868008b..6ed4fd306fc93 100644 --- a/docs/docs/integrations/providers/notion.mdx +++ b/docs/docs/integrations/providers/notion.mdx @@ -12,16 +12,9 @@ All instructions are in examples below. We have two different loaders: `NotionDirectoryLoader` and `NotionDBLoader`. -See a [usage example for the NotionDirectoryLoader](/docs/integrations/document_loaders/notion). +See [usage examples here](/docs/integrations/document_loaders/notion). ```python -from langchain_community.document_loaders import NotionDirectoryLoader -``` - -See a [usage example for the NotionDBLoader](/docs/integrations/document_loaders/notiondb). - - -```python -from langchain_community.document_loaders import NotionDBLoader +from langchain_community.document_loaders import NotionDirectoryLoader, NotionDBLoader ``` diff --git a/docs/src/theme/FeatureTables.js b/docs/src/theme/FeatureTables.js index f83213708126d..1d44350eb0e33 100644 --- a/docs/src/theme/FeatureTables.js +++ b/docs/src/theme/FeatureTables.js @@ -440,6 +440,266 @@ const FEATURE_TABLES = { columns: [], items: [], }, + cloud_provider_loaders: { + link: 'docs/integrations/loaders', + columns: [ + {title: "Document Loader", formatter: (item) => {item.name}}, + {title: "Description", formatter: (item) => item.source}, + {title: "Partner Package", formatter: (item) => item.partnerPackage ? 
"✅" : "❌"}, + {title: "API reference", formatter: (item) => {item.loaderName}}, + ], + items: [ + { + name: "AWS S3 Directory", + link: "aws_s3_directory", + source: "Load documents from an AWS S3 directory", + partnerPackage: false, + loaderName: "S3DirectoryLoader", + apiLink: "https://api.python.langchain.com/en/latest/document_loaders/langchain_community.document_loaders.s3_directory.S3DirectoryLoader.html" + }, + { + name: "AWS S3 File", + link: "aws_s3_file", + source: "Load documents from an AWS S3 file", + partnerPackage: false, + loaderName: "S3FileLoader", + apiLink: "https://api.python.langchain.com/en/latest/document_loaders/langchain_community.document_loaders.s3_file.S3FileLoader.html" + }, + { + name: "Azure AI Data", + link: "azure_ai_data", + source: "Load documents from Azure AI services", + partnerPackage: false, + loaderName: "AzureAIDataLoader", + apiLink: "https://api.python.langchain.com/en/latest/document_loaders/langchain_community.document_loaders.azure_ai_data.AzureAIDataLoader.html" + }, + { + name: "Azure Blob Storage Container", + link: "azure_blob_storage_container", + source: "Load documents from an Azure Blob Storage container", + partnerPackage: false, + loaderName: "AzureBlobStorageContainerLoader", + apiLink: "https://api.python.langchain.com/en/latest/document_loaders/langchain_community.document_loaders.azure_blob_storage_container.AzureBlobStorageContainerLoader.html" + }, + { + name: "Azure Blob Storage File", + link: "azure_blob_storage_file", + source: "Load documents from an Azure Blob Storage file", + partnerPackage: false, + loaderName: "AzureBlobStorageFileLoader", + apiLink: "https://api.python.langchain.com/en/latest/document_loaders/langchain_community.document_loaders.azure_blob_storage_file.AzureBlobStorageFileLoader.html" + }, + { + name: "Dropbox", + link: "dropbox", + source: "Load documents from Dropbox", + partnerPackage: false, + loaderName: "DropboxLoader", + apiLink: "https://api.python.langchain.com/en/latest/document_loaders/langchain_community.document_loaders.dropbox.DropboxLoader.html" + }, + { + name: "Google Cloud Storage Directory", + link: "google_cloud_storage_directory", + source: "Load documents from GCS bucket", + partnerPackage: true, + loaderName: "GCSDirectoryLoader", + apiLink: "https://api.python.langchain.com/en/latest/gcs_directory/langchain_google_community.gcs_directory.GCSDirectoryLoader.html" + }, + { + name: "Google Cloud Storage File", + link: "google_cloud_storage_file", + source: "Load documents from GCS file object", + partnerPackage: true, + loaderName: "GCSFileLoader", + apiLink: "https://api.python.langchain.com/en/latest/gcs_file/langchain_google_community.gcs_file.GCSFileLoader.html" + }, + { + name: "Google Drive", + link: "google_drive", + source: "Load documents from Google Drive (Google Docs only)", + partnerPackage: true, + loaderName: "GoogleDriveLoader", + apiLink: "https://api.python.langchain.com/en/latest/drive/langchain_google_community.drive.GoogleDriveLoader.html" + }, + { + name: "Huawei OBS Directory", + link: "huawei_obs_directory", + source: "Load documents from Huawei Object Storage Service Directory", + partnerPackage: false, + loaderName: "OBSDirectoryLoader", + apiLink: "https://api.python.langchain.com/en/latest/document_loaders/langchain_community.document_loaders.obs_directory.OBSDirectoryLoader.html" + }, + { + name: "Huawei OBS File", + link: "huawei_obs_file", + source: "Load documents from Huawei Object Storage Service File", + partnerPackage: false, + loaderName: 
"OBSFileLoader", + apiLink: "https://api.python.langchain.com/en/latest/document_loaders/langchain_community.document_loaders.obs_file.OBSFileLoader.html" + }, + { + name: "Microsoft OneDrive", + link: "microsoft_onedrive", + source: "Load documents from Microsoft OneDrive", + partnerPackage: false, + loaderName: "OneDriveLoader", + apiLink: "https://api.python.langchain.com/en/latest/document_loaders/langchain_community.document_loaders.onedrive.OneDriveLoader.html" + }, + { + name: "Microsoft SharePoint", + link: "microsoft_sharepoint", + source: "Load documents from Microsoft SharePoint", + partnerPackage: false, + loaderName: "SharePointLoader", + apiLink: "https://api.python.langchain.com/en/latest/document_loaders/langchain_community.document_loaders.sharepoint.SharePointLoader.html" + + }, + { + name: "Tencent COS Directory", + link: "tencent_cos_directory", + source: "Load documents from Tencent Cloud Object Storage Directory", + partnerPackage: false, + loaderName: "TencentCOSDirectoryLoader", + apiLink: "https://api.python.langchain.com/en/latest/document_loaders/langchain_community.document_loaders.tencent_cos_directory.TencentCOSDirectoryLoader.html" + }, + { + name: "Tencent COS File", + link: "tencent_cos_file", + source: "Load documents from Tencent Cloud Object Storage File", + partnerPackage: false, + loaderName: "TencentCOSFileLoader", + apiLink: "https://api.python.langchain.com/en/latest/document_loaders/langchain_community.document_loaders.tencent_cos_file.TencentCOSFileLoader.html" + }, + ] + }, + messaging_loaders: { + link: 'docs/integrations/loaders', + columns: [ + {title: "Document Loader", formatter: (item) => {item.name}}, + {title: "API reference", formatter: (item) => {item.loaderName}}, + ], + items: [ + { + name: "Telegram", + link: "telegram", + loaderName: "TelegramChatFileLoader", + apiLink: "https://api.python.langchain.com/en/latest/document_loaders/langchain_community.document_loaders.telegram.TelegramChatFileLoader.html" + }, + { + name: "WhatsApp", + link: "whatsapp_chat", + loaderName: "WhatsAppChatLoader", + apiLink: "https://api.python.langchain.com/en/latest/chat_loaders/langchain_community.chat_loaders.whatsapp.WhatsAppChatLoader.html" + }, + { + name: "Discord", + link: "discord", + loaderName: "DiscordChatLoader", + apiLink: "https://api.python.langchain.com/en/latest/document_loaders/langchain_community.document_loaders.discord.DiscordChatLoader.html" + }, + { + name: "Facebook Chat", + link: "facebook_chat", + loaderName: "FacebookChatLoader", + apiLink: "https://api.python.langchain.com/en/latest/document_loaders/langchain_community.document_loaders.facebook_chat.FacebookChatLoader.html" + }, + { + name: "Mastodon", + link: "mastodon", + loaderName: "MastodonTootsLoader", + apiLink: "https://api.python.langchain.com/en/latest/document_loaders/langchain_community.document_loaders.mastodon.MastodonTootsLoader.html" + } + ] + }, + productivity_loaders: { + link: 'docs/integrations/loaders', + columns: [ + {title: "Document Loader", formatter: (item) => {item.name}}, + {title: "API reference", formatter: (item) => {item.loaderName}}, + ], + items: [ + { + name: "Figma", + link: "figma", + loaderName: "FigmaFileLoader", + apiLink: "https://api.python.langchain.com/en/latest/document_loaders/langchain_community.document_loaders.figma.FigmaFileLoader.html" + }, + { + name: "Notion", + link: "notion", + loaderName: "NotionDirectoryLoader", + apiLink: 
"https://api.python.langchain.com/en/latest/document_loaders/langchain_community.document_loaders.notion.NotionDirectoryLoader.html" + }, + { + name: "Slack", + link: "slack", + loaderName: "SlackDirectoryLoader", + apiLink: "https://api.python.langchain.com/en/latest/document_loaders/langchain_community.document_loaders.slack_directory.SlackDirectoryLoader.html" + }, + { + name: "Quip", + link: "quip", + loaderName: "QuipLoader", + apiLink: "https://api.python.langchain.com/en/latest/document_loaders/langchain_community.document_loaders.quip.QuipLoader.html" + }, + { + name: "Trello", + link: "trello", + loaderName: "TrelloLoader", + apiLink: "https://api.python.langchain.com/en/latest/document_loaders/langchain_community.document_loaders.trello.TrelloLoader.html" + }, + { + name: "Roam", + link: "roam", + loaderName: "RoamLoader", + apiLink: "https://api.python.langchain.com/en/latest/document_loaders/langchain_community.document_loaders.roam.RoamLoader.html" + }, + { + name: "GitHub", + link: "github", + loaderName: "GithubFileLoader", + apiLink: "https://api.python.langchain.com/en/latest/document_loaders/langchain_community.document_loaders.github.GithubFileLoader.html" + } + ] + }, + social_loaders: { + link: 'docs/integrations/loaders', + columns: [ + {title: "Document Loader", formatter: (item) => {item.name}}, + {title: "API reference", formatter: (item) => {item.loaderName}}, + ], + items: [ + { + name: "Twitter", + link: "twitter", + loaderName: "TwitterTweetLoader", + apiLink: "https://api.python.langchain.com/en/latest/document_loaders/langchain_community.document_loaders.twitter.TwitterTweetLoader.html" + + }, + { + name: "Reddit", + link: "RedditPostsLoader", + loaderName: "RedditPostsLoader", + apiLink: "https://api.python.langchain.com/en/latest/document_loaders/langchain_community.document_loaders.reddit.RedditPostsLoader.html" + }, + ] + }, webpage_loaders: { link: 'docs/integrations/loaders', columns: [ @@ -606,6 +866,12 @@ const FEATURE_TABLES = { link: "bshtml", source: "HTML files", apiLink: "https://api.python.langchain.com/en/latest/document_loaders/langchain_community.document_loaders.html_bs.BSHTMLLoader.html" + }, + { + name: "UnstrucutredXMLLoader", + link: "xml", + source: "XML files", + apiLink: "https://api.python.langchain.com/en/latest/document_loaders/langchain_community.document_loaders.xml.UnstructuredXMLLoader.html" } ] }, diff --git a/docs/vercel.json b/docs/vercel.json index 6afbee9e5e184..5c91cb3fb3bd9 100644 --- a/docs/vercel.json +++ b/docs/vercel.json @@ -102,6 +102,10 @@ "source": "/v0.2/docs/integrations/toolkits/xorbits/", "destination": "/v0.2/docs/integrations/tools#search" }, + { + "source": "/v0.2/docs/integrations/document_loaders/notiondb/", + "destination": "/v0.2/docs/integrations/document_loaders/notion/" + }, { "source": "/v0.2/docs/integrations/chat/ollama_functions/", "destination": "https://python.langchain.com/v0.1/docs/integrations/chat/ollama_functions/" From 3e296e39c8739cffe91d117e28d5fa99b97ec129 Mon Sep 17 00:00:00 2001 From: Bagatur <22008038+baskaryan@users.noreply.github.com> Date: Tue, 20 Aug 2024 11:08:24 -0700 Subject: [PATCH 27/80] docs: update examples in api ref (#25589) --- docs/api_reference/guide_imports.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/api_reference/guide_imports.json b/docs/api_reference/guide_imports.json index ade5a724e088f..ed408b974c657 100644 --- a/docs/api_reference/guide_imports.json +++ b/docs/api_reference/guide_imports.json @@ -1 +1 @@ 
-{"ChatPromptTemplate": {"\ud83e\udd9c\ufe0f\ud83c\udfd3 LangServe": "https://python.langchain.com/v0.2/docs/langserve/", "Conceptual guide": "https://python.langchain.com/v0.2/docs/concepts/", "How to add a semantic layer over graph database": "https://python.langchain.com/v0.2/docs/how_to/graph_semantic/", "How to handle long text when doing extraction": "https://python.langchain.com/v0.2/docs/how_to/extraction_long_text/", "How to add values to a chain's state": "https://python.langchain.com/v0.2/docs/how_to/assign/", "How to do per-user retrieval": "https://python.langchain.com/v0.2/docs/how_to/qa_per_user/", "How to track token usage in ChatModels": "https://python.langchain.com/v0.2/docs/how_to/chat_token_usage_tracking/", "How to create a custom LLM class": "https://python.langchain.com/v0.2/docs/how_to/custom_llm/", "How to inspect runnables": "https://python.langchain.com/v0.2/docs/how_to/inspect/", "How to handle cases where no queries are generated": "https://python.langchain.com/v0.2/docs/how_to/query_no_queries/", "How to use few shot examples in chat models": "https://python.langchain.com/v0.2/docs/how_to/few_shot_examples_chat/", "How to do tool/function calling": "https://python.langchain.com/v0.2/docs/how_to/function_calling/", "How to use prompting alone (no tool calling) to do extraction": "https://python.langchain.com/v0.2/docs/how_to/extraction_parse/", "How to deal with large databases when doing SQL question-answering": "https://python.langchain.com/v0.2/docs/how_to/sql_large_db/", "How to use reference examples when doing extraction": "https://python.langchain.com/v0.2/docs/how_to/extraction_examples/", "How to handle multiple queries when doing query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_multiple_queries/", "How to add fallbacks to a runnable": "https://python.langchain.com/v0.2/docs/how_to/fallbacks/", "How to propagate callbacks constructor": "https://python.langchain.com/v0.2/docs/how_to/callbacks_constructor/", "How to map values to a graph database": "https://python.langchain.com/v0.2/docs/how_to/graph_mapping/", "How to do question answering over CSVs": "https://python.langchain.com/v0.2/docs/how_to/sql_csv/", "How to stream results from your RAG application": "https://python.langchain.com/v0.2/docs/how_to/qa_streaming/", "How to get your RAG application to return sources": "https://python.langchain.com/v0.2/docs/how_to/qa_sources/", "How to attach callbacks to a runnable": "https://python.langchain.com/v0.2/docs/how_to/callbacks_attach/", "How to use a model to call tools": "https://python.langchain.com/v0.2/docs/how_to/tool_calling", "How to handle tool errors": "https://python.langchain.com/v0.2/docs/how_to/tools_error/", "How to add tools to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_tools/", "How to add default invocation args to a Runnable": "https://python.langchain.com/v0.2/docs/how_to/binding/", "How to create a dynamic (self-constructing) chain": "https://python.langchain.com/v0.2/docs/how_to/dynamic_chain/", "How to create custom callback handlers": "https://python.langchain.com/v0.2/docs/how_to/custom_callbacks/", "How to stream runnables": "https://python.langchain.com/v0.2/docs/how_to/streaming/", "How to invoke runnables in parallel": "https://python.langchain.com/v0.2/docs/how_to/parallel/", "How to pass through arguments from one step to the next": "https://python.langchain.com/v0.2/docs/how_to/passthrough/", "How to retrieve using multiple vectors per document": 
"https://python.langchain.com/v0.2/docs/how_to/multi_vector/", "How to add chat history": "https://python.langchain.com/v0.2/docs/how_to/qa_chat_history_how_to/", "How to add message history": "https://python.langchain.com/v0.2/docs/how_to/message_history/", "How to add retrieval to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_retrieval/", "How to handle multiple retrievers when doing query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_multiple_retrievers/", "How to get a RAG application to add citations": "https://python.langchain.com/v0.2/docs/how_to/qa_citations/", "How to run custom functions": "https://python.langchain.com/v0.2/docs/how_to/functions/", "How to add memory to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_memory/", "How deal with high cardinality categoricals when doing query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_high_cardinality/", "How to return structured data from a model": "https://python.langchain.com/v0.2/docs/how_to/structured_output/", "How to add ad-hoc tool calling capability to LLMs and Chat Models": "https://python.langchain.com/v0.2/docs/how_to/tools_prompting/", "LangChain Expression Language Cheatsheet": "https://python.langchain.com/v0.2/docs/how_to/lcel_cheatsheet/", "How to debug your LLM apps": "https://python.langchain.com/v0.2/docs/how_to/debugging/", "How to chain runnables": "https://python.langchain.com/v0.2/docs/how_to/sequence/", "Hybrid Search": "https://python.langchain.com/v0.2/docs/how_to/hybrid/", "How to migrate from legacy LangChain agents to LangGraph": "https://python.langchain.com/v0.2/docs/how_to/migrate_agent/", "How to do query validation as part of SQL question-answering": "https://python.langchain.com/v0.2/docs/how_to/sql_query_checking/", "How to use multimodal prompts": "https://python.langchain.com/v0.2/docs/how_to/multimodal_prompts/", "How to pass callbacks in at runtime": "https://python.langchain.com/v0.2/docs/how_to/callbacks_runtime/", "How to add examples to the prompt for query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_few_shot/", "Facebook Messenger": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/facebook/", "LangSmith LLM Runs": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/langsmith_llm_runs/", "iMessage": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/imessage/", "NVIDIA NIMs ": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/nvidia_ai_endpoints/", "You.com": "https://python.langchain.com/v0.2/docs/integrations/retrievers/you-retriever/", "Fleet AI Context": "https://python.langchain.com/v0.2/docs/integrations/retrievers/fleet_context/", "AskNews": "https://python.langchain.com/v0.2/docs/integrations/retrievers/asknews/", "Tavily Search API": "https://python.langchain.com/v0.2/docs/integrations/retrievers/tavily/", "Activeloop Deep Memory": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "RAGatouille": "https://python.langchain.com/v0.2/docs/integrations/retrievers/ragatouille/", "Redis": "https://python.langchain.com/v0.2/docs/integrations/memory/redis_chat_message_history/", "Google SQL for MySQL": "https://python.langchain.com/v0.2/docs/integrations/memory/google_sql_mysql/", "Google AlloyDB for PostgreSQL": "https://python.langchain.com/v0.2/docs/integrations/memory/google_alloydb/", "ZepCloudChatMessageHistory": 
"https://python.langchain.com/v0.2/docs/integrations/memory/zep_cloud_chat_message_history/", "AWS DynamoDB": "https://python.langchain.com/v0.2/docs/integrations/memory/aws_dynamodb/", "MongoDB": "https://python.langchain.com/v0.2/docs/integrations/memory/mongodb_chat_message_history/", "SQL (SQLAlchemy)": "https://python.langchain.com/v0.2/docs/integrations/memory/sql_chat_message_history/", "Streamlit": "https://python.langchain.com/v0.2/docs/integrations/memory/streamlit_chat_message_history/", "Google El Carro Oracle": "https://python.langchain.com/v0.2/docs/integrations/memory/google_el_carro/", "SQLite": "https://python.langchain.com/v0.2/docs/integrations/memory/sqlite/", "Google SQL for PostgreSQL": "https://python.langchain.com/v0.2/docs/integrations/memory/google_sql_pg/", "Google SQL for SQL Server": "https://python.langchain.com/v0.2/docs/integrations/memory/google_sql_mssql/", "TiDB": "https://python.langchain.com/v0.2/docs/integrations/memory/tidb_chat_message_history/", "Kinetica SqlAssist LLM Demo": "https://python.langchain.com/v0.2/docs/integrations/chat/kinetica/", "ChatAnthropic": "https://python.langchain.com/v0.2/docs/integrations/chat/anthropic/", "Groq": "https://python.langchain.com/v0.2/docs/integrations/chat/groq/", "ChatOpenAI": "https://python.langchain.com/v0.2/docs/integrations/chat/openai/", "ChatVertexAI": "https://python.langchain.com/v0.2/docs/integrations/chat/google_vertex_ai_palm/", "JinaChat": "https://python.langchain.com/v0.2/docs/integrations/chat/jinachat/", "ChatOllama": "https://python.langchain.com/v0.2/docs/integrations/chat/ollama/", "MistralAI": "https://python.langchain.com/v0.2/docs/integrations/chat/mistralai/", "ChatAI21": "https://python.langchain.com/v0.2/docs/integrations/chat/ai21/", "Llama2Chat": "https://python.langchain.com/v0.2/docs/integrations/chat/llama2_chat/", "Cohere": "https://python.langchain.com/v0.2/docs/integrations/providers/cohere/", "Eden AI": "https://python.langchain.com/v0.2/docs/integrations/chat/edenai/", "vLLM Chat": "https://python.langchain.com/v0.2/docs/integrations/chat/vllm/", "Yuan2.0": "https://python.langchain.com/v0.2/docs/integrations/chat/yuan2/", "Maritalk": "https://python.langchain.com/v0.2/docs/integrations/chat/maritalk/", "ChatPerplexity": "https://python.langchain.com/v0.2/docs/integrations/chat/perplexity/", "ChatUpstage": "https://python.langchain.com/v0.2/docs/integrations/chat/upstage/", "NVIDIA NIMs": "https://python.langchain.com/v0.2/docs/integrations/chat/nvidia_ai_endpoints/", "Context": "https://python.langchain.com/v0.2/docs/integrations/callbacks/context/", "Fiddler": "https://python.langchain.com/v0.2/docs/integrations/callbacks/fiddler/", "UpTrain": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "Vectara": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_summary/", "SQL Database": "https://python.langchain.com/v0.2/docs/integrations/toolkits/sql_database/", "Weaviate": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/weaviate/", "Yellowbrick": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/yellowbrick/", "Jaguar Vector Database": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/jaguar/", "Astra DB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/astradb/", "Apache Cassandra": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/cassandra/", "OpenAI metadata tagger": 
"https://python.langchain.com/v0.2/docs/integrations/document_transformers/openai_metadata_tagger/", "Figma": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/figma/", "Build a Retrieval Augmented Generation (RAG) App": "https://python.langchain.com/v0.2/docs/tutorials/rag/", "Build an Extraction Chain": "https://python.langchain.com/v0.2/docs/tutorials/extraction/", "Build a Chatbot": "https://python.langchain.com/v0.2/docs/tutorials/chatbot/", "Conversational RAG": "https://python.langchain.com/v0.2/docs/tutorials/qa_chat_history/", "Classify Text into Labels": "https://python.langchain.com/v0.2/docs/tutorials/classification/", "Build a Query Analysis System": "https://python.langchain.com/v0.2/docs/tutorials/query_analysis/", "Build a Simple LLM Application": "https://python.langchain.com/v0.2/docs/tutorials/llm_chain/", "Vector stores and retrievers": "https://python.langchain.com/v0.2/docs/tutorials/retrievers/"}, "ChatAnthropic": {"\ud83e\udd9c\ufe0f\ud83c\udfd3 LangServe": "https://python.langchain.com/v0.2/docs/langserve/", "How to use callbacks in async environments": "https://python.langchain.com/v0.2/docs/how_to/callbacks_async/", "How to route between sub-chains": "https://python.langchain.com/v0.2/docs/how_to/routing/", "How to track token usage in ChatModels": "https://python.langchain.com/v0.2/docs/how_to/chat_token_usage_tracking/", "How to parse XML output": "https://python.langchain.com/v0.2/docs/how_to/output_parser_xml/", "How to use prompting alone (no tool calling) to do extraction": "https://python.langchain.com/v0.2/docs/how_to/extraction_parse/", "How to add fallbacks to a runnable": "https://python.langchain.com/v0.2/docs/how_to/fallbacks/", "How to propagate callbacks constructor": "https://python.langchain.com/v0.2/docs/how_to/callbacks_constructor/", "How to stream chat model responses": "https://python.langchain.com/v0.2/docs/how_to/chat_streaming/", "How to attach callbacks to a runnable": "https://python.langchain.com/v0.2/docs/how_to/callbacks_attach/", "How to create a dynamic (self-constructing) chain": "https://python.langchain.com/v0.2/docs/how_to/dynamic_chain/", "How to create custom callback handlers": "https://python.langchain.com/v0.2/docs/how_to/custom_callbacks/", "How to configure runtime chain internals": "https://python.langchain.com/v0.2/docs/how_to/configure/", "How to create a custom Output Parser": "https://python.langchain.com/v0.2/docs/how_to/output_parser_custom/", "Response metadata": "https://python.langchain.com/v0.2/docs/how_to/response_metadata/", "How to pass callbacks in at runtime": "https://python.langchain.com/v0.2/docs/how_to/callbacks_runtime/", "Anthropic": "https://python.langchain.com/v0.2/docs/integrations/platforms/anthropic/", "ChatAnthropic": "https://python.langchain.com/v0.2/docs/integrations/chat/anthropic/", "Log10": "https://python.langchain.com/v0.2/docs/integrations/providers/log10/", "Python": "https://python.langchain.com/v0.2/docs/integrations/toolkits/python/", "PlayWright Browser": "https://python.langchain.com/v0.2/docs/integrations/toolkits/playwright/", "Build an Agent": "https://python.langchain.com/v0.2/docs/tutorials/agents/"}, "ChatOpenAI": {"\ud83e\udd9c\ufe0f\ud83c\udfd3 LangServe": "https://python.langchain.com/v0.2/docs/langserve/", "How to add a semantic layer over graph database": "https://python.langchain.com/v0.2/docs/how_to/graph_semantic/", "How to add values to a chain's state": "https://python.langchain.com/v0.2/docs/how_to/assign/", "How to track token usage in 
ChatModels": "https://python.langchain.com/v0.2/docs/how_to/chat_token_usage_tracking/", "How to inspect runnables": "https://python.langchain.com/v0.2/docs/how_to/inspect/", "How to handle cases where no queries are generated": "https://python.langchain.com/v0.2/docs/how_to/query_no_queries/", "How to use few shot examples in chat models": "https://python.langchain.com/v0.2/docs/how_to/few_shot_examples_chat/", "How to use reference examples when doing extraction": "https://python.langchain.com/v0.2/docs/how_to/extraction_examples/", "How to handle multiple queries when doing query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_multiple_queries/", "How to add fallbacks to a runnable": "https://python.langchain.com/v0.2/docs/how_to/fallbacks/", "How to map values to a graph database": "https://python.langchain.com/v0.2/docs/how_to/graph_mapping/", "How to pass multimodal data directly to models": "https://python.langchain.com/v0.2/docs/how_to/multimodal_inputs/", "How to use the output-fixing parser": "https://python.langchain.com/v0.2/docs/how_to/output_parser_fixing/", "How to use a model to call tools": "https://python.langchain.com/v0.2/docs/how_to/tool_calling", "How to convert tools to OpenAI Functions": "https://python.langchain.com/v0.2/docs/how_to/tools_as_openai_functions/", "How to add tools to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_tools/", "How to add default invocation args to a Runnable": "https://python.langchain.com/v0.2/docs/how_to/binding/", "How to invoke runnables in parallel": "https://python.langchain.com/v0.2/docs/how_to/parallel/", "How to pass through arguments from one step to the next": "https://python.langchain.com/v0.2/docs/how_to/passthrough/", "How to add chat history": "https://python.langchain.com/v0.2/docs/how_to/qa_chat_history_how_to/", "How to add retrieval to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_retrieval/", "How to configure runtime chain internals": "https://python.langchain.com/v0.2/docs/how_to/configure/", "How to handle multiple retrievers when doing query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_multiple_retrievers/", "How to construct knowledge graphs": "https://python.langchain.com/v0.2/docs/how_to/graph_constructing/", "How to retry when a parsing error occurs": "https://python.langchain.com/v0.2/docs/how_to/output_parser_retry/", "How to use the MultiQueryRetriever": "https://python.langchain.com/v0.2/docs/how_to/MultiQueryRetriever/", "How to run custom functions": "https://python.langchain.com/v0.2/docs/how_to/functions/", "How to best prompt for Graph-RAG": "https://python.langchain.com/v0.2/docs/how_to/graph_prompting/", "How to add memory to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_memory/", "How deal with high cardinality categoricals when doing query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_high_cardinality/", "How to get log probabilities": "https://python.langchain.com/v0.2/docs/how_to/logprobs/", "How to parse YAML output": "https://python.langchain.com/v0.2/docs/how_to/output_parser_yaml/", "Response metadata": "https://python.langchain.com/v0.2/docs/how_to/response_metadata/", "How to parse JSON output": "https://python.langchain.com/v0.2/docs/how_to/output_parser_json/", "How to do \"self-querying\" retrieval": "https://python.langchain.com/v0.2/docs/how_to/self_query/", "Hybrid Search": "https://python.langchain.com/v0.2/docs/how_to/hybrid/", "How to migrate from legacy LangChain 
agents to LangGraph": "https://python.langchain.com/v0.2/docs/how_to/migrate_agent/", "How to use multimodal prompts": "https://python.langchain.com/v0.2/docs/how_to/multimodal_prompts/", "How to add scores to retriever results": "https://python.langchain.com/v0.2/docs/how_to/add_scores_retriever/", "How to add examples to the prompt for query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_few_shot/", "WeChat": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/wechat/", "Facebook Messenger": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/facebook/", "LangSmith LLM Runs": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/langsmith_llm_runs/", "Slack": "https://python.langchain.com/v0.2/docs/integrations/toolkits/slack/", "WhatsApp": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/whatsapp/", "LangSmith Chat Datasets": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/langsmith_dataset/", "iMessage": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/imessage/", "Telegram": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/telegram/", "Discord": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/discord/", "RePhraseQuery": "https://python.langchain.com/v0.2/docs/integrations/retrievers/re_phrase/", "You.com": "https://python.langchain.com/v0.2/docs/integrations/retrievers/you-retriever/", "Fleet AI Context": "https://python.langchain.com/v0.2/docs/integrations/retrievers/fleet_context/", "AskNews": "https://python.langchain.com/v0.2/docs/integrations/tools/asknews/", "LLMLingua Document Compressor": "https://python.langchain.com/v0.2/docs/integrations/retrievers/llmlingua/", "Outline": "https://python.langchain.com/v0.2/docs/integrations/retrievers/outline/", "Wikipedia": "https://python.langchain.com/v0.2/docs/integrations/retrievers/wikipedia/", "SEC filing": "https://python.langchain.com/v0.2/docs/integrations/retrievers/sec_filings/", "Tavily Search API": "https://python.langchain.com/v0.2/docs/integrations/retrievers/tavily/", "Activeloop Deep Memory": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "RAGatouille": "https://python.langchain.com/v0.2/docs/integrations/retrievers/ragatouille/", "Arxiv": "https://python.langchain.com/v0.2/docs/integrations/retrievers/arxiv/", "Rememberizer": "https://python.langchain.com/v0.2/docs/integrations/retrievers/rememberizer/", "Milvus Hybrid Search": "https://python.langchain.com/v0.2/docs/integrations/retrievers/milvus_hybrid_search/", "Kay.ai": "https://python.langchain.com/v0.2/docs/integrations/retrievers/kay/", "FlashRank reranker": "https://python.langchain.com/v0.2/docs/integrations/retrievers/flashrank-reranker/", "Tencent Cloud VectorDB": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/tencentvectordb/", "MyScale": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/myscale_self_query/", "OpenAI": "https://python.langchain.com/v0.2/docs/integrations/platforms/openai/", "ChatGPT Plugins": "https://python.langchain.com/v0.2/docs/integrations/tools/chatgpt_plugins/", "Connery Action Tool": "https://python.langchain.com/v0.2/docs/integrations/tools/connery/", "Tavily Search": "https://python.langchain.com/v0.2/docs/integrations/tools/tavily_search/", "Infobip": "https://python.langchain.com/v0.2/docs/integrations/tools/infobip/", "E2B Data Analysis": 
"https://python.langchain.com/v0.2/docs/integrations/tools/e2b_data_analysis/", "Human as a tool": "https://python.langchain.com/v0.2/docs/integrations/tools/human_tools/", "Azure Container Apps dynamic sessions": "https://python.langchain.com/v0.2/docs/integrations/tools/azure_dynamic_sessions/", "Yahoo Finance News": "https://python.langchain.com/v0.2/docs/integrations/tools/yahoo_finance_news/", "Semantic Scholar API Tool": "https://python.langchain.com/v0.2/docs/integrations/tools/semanticscholar/", "Passio NutritionAI": "https://python.langchain.com/v0.2/docs/integrations/tools/passio_nutrition_ai/", "Bearly Code Interpreter": "https://python.langchain.com/v0.2/docs/integrations/tools/bearly/", "ArXiv": "https://python.langchain.com/v0.2/docs/integrations/tools/arxiv/", "Exa Search": "https://python.langchain.com/v0.2/docs/integrations/tools/exa_search/", "Reddit Search ": "https://python.langchain.com/v0.2/docs/integrations/tools/reddit_search/", "You.com Search": "https://python.langchain.com/v0.2/docs/integrations/tools/you/", "Shell (bash)": "https://python.langchain.com/v0.2/docs/integrations/tools/bash/", "Redis": "https://python.langchain.com/v0.2/docs/integrations/memory/redis_chat_message_history/", "ZepCloudChatMessageHistory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_cloud_chat_message_history/", "AWS DynamoDB": "https://python.langchain.com/v0.2/docs/integrations/memory/aws_dynamodb/", "MongoDB": "https://python.langchain.com/v0.2/docs/integrations/memory/mongodb_chat_message_history/", "Xata": "https://python.langchain.com/v0.2/docs/integrations/memory/xata_chat_message_history/", "Remembrall": "https://python.langchain.com/v0.2/docs/integrations/memory/remembrall/", "SQL (SQLAlchemy)": "https://python.langchain.com/v0.2/docs/integrations/memory/sql_chat_message_history/", "Streamlit": "https://python.langchain.com/v0.2/docs/integrations/memory/streamlit_chat_message_history/", "SQLite": "https://python.langchain.com/v0.2/docs/integrations/memory/sqlite/", "TiDB": "https://python.langchain.com/v0.2/docs/integrations/memory/tidb_chat_message_history/", "ChatOpenAI": "https://python.langchain.com/v0.2/docs/integrations/chat/openai/", "vLLM Chat": "https://python.langchain.com/v0.2/docs/integrations/chat/vllm/", "LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/callbacks/llmonitor/", "Context": "https://python.langchain.com/v0.2/docs/integrations/callbacks/context/", "Label Studio": "https://python.langchain.com/v0.2/docs/integrations/callbacks/labelstudio/", "PromptLayer": "https://python.langchain.com/v0.2/docs/integrations/callbacks/promptlayer/", "UpTrain": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "Trubrics": "https://python.langchain.com/v0.2/docs/integrations/callbacks/trubrics/", "Infino": "https://python.langchain.com/v0.2/docs/integrations/callbacks/infino/", "CnosDB": "https://python.langchain.com/v0.2/docs/integrations/providers/cnosdb/", "Log10": "https://python.langchain.com/v0.2/docs/integrations/providers/log10/", "Flyte": "https://python.langchain.com/v0.2/docs/integrations/providers/flyte/", "Arthur": "https://python.langchain.com/v0.2/docs/integrations/providers/arthur_tracking/", "Dataherald": "https://python.langchain.com/v0.2/docs/integrations/providers/dataherald/", "Log, Trace, and Monitor": "https://python.langchain.com/v0.2/docs/integrations/providers/portkey/logging_tracing_portkey/", "Portkey": "https://python.langchain.com/v0.2/docs/integrations/providers/portkey/index/", 
"Vectara": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_summary/", "Connery Toolkit": "https://python.langchain.com/v0.2/docs/integrations/toolkits/connery/", "CSV": "https://python.langchain.com/v0.2/docs/integrations/toolkits/csv/", "Document Comparison": "https://python.langchain.com/v0.2/docs/integrations/toolkits/document_comparison_toolkit/", "Python": "https://python.langchain.com/v0.2/docs/integrations/toolkits/python/", "PowerBI Dataset": "https://python.langchain.com/v0.2/docs/integrations/toolkits/powerbi/", "SQL Database": "https://python.langchain.com/v0.2/docs/integrations/toolkits/sql_database/", "Gmail": "https://python.langchain.com/v0.2/docs/integrations/toolkits/gmail/", "Cassandra Database": "https://python.langchain.com/v0.2/docs/integrations/toolkits/cassandra_database/", "Airbyte Question Answering": "https://python.langchain.com/v0.2/docs/integrations/toolkits/airbyte_structured_qa/", "Github": "https://python.langchain.com/v0.2/docs/integrations/toolkits/github/", "Spark SQL": "https://python.langchain.com/v0.2/docs/integrations/toolkits/spark_sql/", "AINetwork": "https://python.langchain.com/v0.2/docs/integrations/toolkits/ainetwork/", "# Cogniswitch Tools": "https://python.langchain.com/v0.2/docs/integrations/toolkits/cogniswitch/", "Pandas Dataframe": "https://python.langchain.com/v0.2/docs/integrations/toolkits/pandas/", "Robocorp": "https://python.langchain.com/v0.2/docs/integrations/toolkits/robocorp/", "MultiOn": "https://python.langchain.com/v0.2/docs/integrations/toolkits/multion/", "Amadeus": "https://python.langchain.com/v0.2/docs/integrations/toolkits/amadeus/", "OpenAPI": "https://python.langchain.com/v0.2/docs/integrations/toolkits/openapi/", "Polygon IO Toolkit": "https://python.langchain.com/v0.2/docs/integrations/toolkits/polygon/", "Weaviate": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/weaviate/", "Yellowbrick": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/yellowbrick/", "Jaguar Vector Database": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/jaguar/", "SAP HANA Cloud Vector Engine": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sap_hanavector/", "Hippo": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/hippo/", "Momento Vector Index (MVI)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/momento_vector_index/", "Neo4j Vector Index": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/neo4jvector/", "Astra DB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/astradb/", "Apache Cassandra": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/cassandra/", "KDB.AI": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/kdbai/", "Timescale Vector (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/timescalevector/", "RankLLM Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/rankllm-reranker/", "OpenAI metadata tagger": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/openai_metadata_tagger/", "Jina Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/jina_rerank/", "YouTube audio": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/youtube_audio/", "Figma": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/figma/", "Browserbase": 
"https://python.langchain.com/v0.2/docs/integrations/document_loaders/browserbase/", "Memgraph": "https://python.langchain.com/v0.2/docs/integrations/graphs/memgraph/", "RDFLib": "https://python.langchain.com/v0.2/docs/integrations/graphs/rdflib_sparql/", "NebulaGraph": "https://python.langchain.com/v0.2/docs/integrations/graphs/nebula_graph/", "HugeGraph": "https://python.langchain.com/v0.2/docs/integrations/graphs/hugegraph/", "Diffbot": "https://python.langchain.com/v0.2/docs/integrations/graphs/diffbot/", "Ontotext GraphDB": "https://python.langchain.com/v0.2/docs/integrations/graphs/ontotext/", "Apache AGE": "https://python.langchain.com/v0.2/docs/integrations/graphs/apache_age/", "Neo4j": "https://python.langchain.com/v0.2/docs/integrations/graphs/neo4j_cypher/", "ArangoDB": "https://python.langchain.com/v0.2/docs/integrations/graphs/arangodb/", "Amazon Neptune with Cypher": "https://python.langchain.com/v0.2/docs/integrations/graphs/amazon_neptune_open_cypher/", "Kuzu": "https://python.langchain.com/v0.2/docs/integrations/graphs/kuzu_db/", "FalkorDB": "https://python.langchain.com/v0.2/docs/integrations/graphs/falkordb/", "Summarize Text": "https://python.langchain.com/v0.2/docs/tutorials/summarization/", "Conversational RAG": "https://python.langchain.com/v0.2/docs/tutorials/qa_chat_history/", "Classify Text into Labels": "https://python.langchain.com/v0.2/docs/tutorials/classification/", "Build a Query Analysis System": "https://python.langchain.com/v0.2/docs/tutorials/query_analysis/", "Build a Simple LLM Application": "https://python.langchain.com/v0.2/docs/tutorials/llm_chain/", "Generate Synthetic Data": "https://python.langchain.com/v0.2/docs/tutorials/data_generation/", "Build a Question Answering application over a Graph Database": "https://python.langchain.com/v0.2/docs/tutorials/graph/"}, "SystemMessage": {"\ud83e\udd9c\ufe0f\ud83c\udfd3 LangServe": "https://python.langchain.com/v0.2/docs/langserve/", "How to use reference examples when doing extraction": "https://python.langchain.com/v0.2/docs/how_to/extraction_examples/", "How to create a custom chat model class": "https://python.langchain.com/v0.2/docs/how_to/custom_chat_model/", "How to compose prompts together": "https://python.langchain.com/v0.2/docs/how_to/prompts_composition/", "How to migrate from legacy LangChain agents to LangGraph": "https://python.langchain.com/v0.2/docs/how_to/migrate_agent/", "How to add examples to the prompt for query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_few_shot/", "Activeloop Deep Memory": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "Exa Search": "https://python.langchain.com/v0.2/docs/integrations/tools/exa_search/", "solar.md": "https://python.langchain.com/v0.2/docs/integrations/chat/solar/", "Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/chat/huggingface/", "ChatFireworks": "https://python.langchain.com/v0.2/docs/integrations/chat/fireworks/", "ChatOctoAI": "https://python.langchain.com/v0.2/docs/integrations/chat/octoai/", "LlamaEdge": "https://python.langchain.com/v0.2/docs/integrations/chat/llama_edge/", "Google AI chat models": "https://python.langchain.com/v0.2/docs/integrations/chat/google_generative_ai/", "ChatKonko": "https://python.langchain.com/v0.2/docs/integrations/chat/konko/", "GigaChat": "https://python.langchain.com/v0.2/docs/integrations/chat/gigachat/", "JinaChat": "https://python.langchain.com/v0.2/docs/integrations/chat/jinachat/", "ChatEverlyAI": 
"https://python.langchain.com/v0.2/docs/integrations/chat/everlyai/", "ChatFriendli": "https://python.langchain.com/v0.2/docs/integrations/chat/friendli/", "ZHIPU AI": "https://python.langchain.com/v0.2/docs/integrations/chat/zhipuai/", "Llama2Chat": "https://python.langchain.com/v0.2/docs/integrations/chat/llama2_chat/", "vLLM Chat": "https://python.langchain.com/v0.2/docs/integrations/chat/vllm/", "Yuan2.0": "https://python.langchain.com/v0.2/docs/integrations/chat/yuan2/", "ChatTongyi": "https://python.langchain.com/v0.2/docs/integrations/chat/tongyi/", "MoonshotChat": "https://python.langchain.com/v0.2/docs/integrations/chat/moonshot/", "ChatPremAI": "https://python.langchain.com/v0.2/docs/integrations/chat/premai/", "ChatAnyscale": "https://python.langchain.com/v0.2/docs/integrations/chat/anyscale/", "ChatYandexGPT": "https://python.langchain.com/v0.2/docs/integrations/chat/yandex/", "LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/callbacks/llmonitor/", "Context": "https://python.langchain.com/v0.2/docs/integrations/callbacks/context/", "Label Studio": "https://python.langchain.com/v0.2/docs/integrations/callbacks/labelstudio/", "Trubrics": "https://python.langchain.com/v0.2/docs/integrations/callbacks/trubrics/", "MLflow Deployments for LLMs": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow/", "MLflow AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_ai_gateway/", "PremAI": "https://python.langchain.com/v0.2/docs/integrations/providers/premai/", "Javelin AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/javelin_ai_gateway/", "SQL Database": "https://python.langchain.com/v0.2/docs/integrations/toolkits/sql_database/", "Robocorp": "https://python.langchain.com/v0.2/docs/integrations/toolkits/robocorp/", "Google Cloud Vertex AI": "https://python.langchain.com/v0.2/docs/integrations/llms/google_vertex_ai_palm/", "Javelin AI Gateway Tutorial": "https://python.langchain.com/v0.2/docs/integrations/llms/javelin/", "Build a Question/Answering system over SQL data": "https://python.langchain.com/v0.2/docs/tutorials/sql_qa/", "Build a Simple LLM Application": "https://python.langchain.com/v0.2/docs/tutorials/llm_chain/"}, "HumanMessage": {"\ud83e\udd9c\ufe0f\ud83c\udfd3 LangServe": "https://python.langchain.com/v0.2/docs/langserve/", "Conceptual guide": "https://python.langchain.com/v0.2/docs/concepts/", "Build an Agent": "https://python.langchain.com/v0.2/docs/tutorials/agents/", "How to add a semantic layer over graph database": "https://python.langchain.com/v0.2/docs/how_to/graph_semantic/", "How to use callbacks in async environments": "https://python.langchain.com/v0.2/docs/how_to/callbacks_async/", "How to do tool/function calling": "https://python.langchain.com/v0.2/docs/how_to/function_calling/", "How to use reference examples when doing extraction": "https://python.langchain.com/v0.2/docs/how_to/extraction_examples/", "How to pass multimodal data directly to models": "https://python.langchain.com/v0.2/docs/how_to/multimodal_inputs/", "How to use a model to call tools": "https://python.langchain.com/v0.2/docs/how_to/tool_calling", "How to create a custom chat model class": "https://python.langchain.com/v0.2/docs/how_to/custom_chat_model/", "How to convert tools to OpenAI Functions": "https://python.langchain.com/v0.2/docs/how_to/tools_as_openai_functions/", "How to handle tool errors": "https://python.langchain.com/v0.2/docs/how_to/tools_error/", "How to add tools to chatbots": 
"https://python.langchain.com/v0.2/docs/how_to/chatbots_tools/", "How to add chat history": "https://python.langchain.com/v0.2/docs/how_to/qa_chat_history_how_to/", "How to add message history": "https://python.langchain.com/v0.2/docs/how_to/message_history/", "How to add retrieval to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_retrieval/", "How to add memory to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_memory/", "How to return structured data from a model": "https://python.langchain.com/v0.2/docs/how_to/structured_output/", "How to compose prompts together": "https://python.langchain.com/v0.2/docs/how_to/prompts_composition/", "How to add examples to the prompt for query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_few_shot/", "WeChat": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/wechat/", "Discord": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/discord/", "Zep Open Source": "https://python.langchain.com/v0.2/docs/integrations/retrievers/zep_memorystore/", "Zep Cloud": "https://python.langchain.com/v0.2/docs/integrations/retrievers/zep_cloud_memorystore/", "Activeloop Deep Memory": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "Google Imagen": "https://python.langchain.com/v0.2/docs/integrations/tools/google_imagen/", "Zep Open Source Memory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory/", "ZepCloudChatMessageHistory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_cloud_chat_message_history/", "Zep Cloud Memory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory_cloud/", "solar.md": "https://python.langchain.com/v0.2/docs/integrations/chat/solar/", "Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/chat/huggingface/", "AzureMLChatOnlineEndpoint": "https://python.langchain.com/v0.2/docs/integrations/chat/azureml_chat_endpoint/", "Alibaba Cloud PAI EAS": "https://python.langchain.com/v0.2/docs/integrations/chat/alibaba_cloud_pai_eas/", "Chat with Coze Bot": "https://python.langchain.com/v0.2/docs/integrations/chat/coze/", "ChatFireworks": "https://python.langchain.com/v0.2/docs/integrations/chat/fireworks/", "ChatOctoAI": "https://python.langchain.com/v0.2/docs/integrations/chat/octoai/", "DeepInfra": "https://python.langchain.com/v0.2/docs/integrations/chat/deepinfra/", "ChatAnthropic": "https://python.langchain.com/v0.2/docs/integrations/chat/anthropic/", "ChatLiteLLM": "https://python.langchain.com/v0.2/docs/integrations/chat/litellm/", "LlamaEdge": "https://python.langchain.com/v0.2/docs/integrations/chat/llama_edge/", "Google AI chat models": "https://python.langchain.com/v0.2/docs/integrations/chat/google_generative_ai/", "OllamaFunctions": "https://python.langchain.com/v0.2/docs/integrations/chat/ollama_functions/", "VolcEngineMaasChat": "https://python.langchain.com/v0.2/docs/integrations/chat/volcengine_maas/", "ChatKonko": "https://python.langchain.com/v0.2/docs/integrations/chat/konko/", "ChatVertexAI": "https://python.langchain.com/v0.2/docs/integrations/chat/google_vertex_ai_palm/", "ChatBedrock": "https://python.langchain.com/v0.2/docs/integrations/chat/bedrock/", "MLX": "https://python.langchain.com/v0.2/docs/integrations/chat/mlx/", "GigaChat": "https://python.langchain.com/v0.2/docs/integrations/chat/gigachat/", "JinaChat": 
"https://python.langchain.com/v0.2/docs/integrations/chat/jinachat/", "ChatOllama": "https://python.langchain.com/v0.2/docs/integrations/chat/ollama/", "AzureChatOpenAI": "https://python.langchain.com/v0.2/docs/integrations/chat/azure_chat_openai/", "ChatEverlyAI": "https://python.langchain.com/v0.2/docs/integrations/chat/everlyai/", "GPTRouter": "https://python.langchain.com/v0.2/docs/integrations/chat/gpt_router/", "ChatLiteLLMRouter": "https://python.langchain.com/v0.2/docs/integrations/chat/litellm_router/", "ChatFriendli": "https://python.langchain.com/v0.2/docs/integrations/chat/friendli/", "MistralAI": "https://python.langchain.com/v0.2/docs/integrations/chat/mistralai/", "ZHIPU AI": "https://python.langchain.com/v0.2/docs/integrations/chat/zhipuai/", "Chat with Baichuan-192K": "https://python.langchain.com/v0.2/docs/integrations/chat/baichuan/", "QianfanChatEndpoint": "https://python.langchain.com/v0.2/docs/integrations/chat/baidu_qianfan_endpoint/", "Cohere": "https://python.langchain.com/v0.2/docs/integrations/llms/cohere/", "Eden AI": "https://python.langchain.com/v0.2/docs/integrations/chat/edenai/", "ErnieBotChat": "https://python.langchain.com/v0.2/docs/integrations/chat/ernie/", "vLLM Chat": "https://python.langchain.com/v0.2/docs/integrations/chat/vllm/", "Tencent Hunyuan": "https://python.langchain.com/v0.2/docs/integrations/chat/tencent_hunyuan/", "MiniMaxChat": "https://python.langchain.com/v0.2/docs/integrations/chat/minimax/", "Yuan2.0": "https://python.langchain.com/v0.2/docs/integrations/chat/yuan2/", "ChatTongyi": "https://python.langchain.com/v0.2/docs/integrations/chat/tongyi/", "PromptLayerChatOpenAI": "https://python.langchain.com/v0.2/docs/integrations/chat/promptlayer_chatopenai/", "SparkLLM Chat": "https://python.langchain.com/v0.2/docs/integrations/chat/sparkllm/", "MoonshotChat": "https://python.langchain.com/v0.2/docs/integrations/chat/moonshot/", "Dappier AI": "https://python.langchain.com/v0.2/docs/integrations/chat/dappier/", "Maritalk": "https://python.langchain.com/v0.2/docs/integrations/chat/maritalk/", "ChatPremAI": "https://python.langchain.com/v0.2/docs/integrations/chat/premai/", "ChatAnyscale": "https://python.langchain.com/v0.2/docs/integrations/chat/anyscale/", "ChatYandexGPT": "https://python.langchain.com/v0.2/docs/integrations/chat/yandex/", "NVIDIA NIMs": "https://python.langchain.com/v0.2/docs/integrations/chat/nvidia_ai_endpoints/", "LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/callbacks/llmonitor/", "Context": "https://python.langchain.com/v0.2/docs/integrations/callbacks/context/", "Label Studio": "https://python.langchain.com/v0.2/docs/integrations/callbacks/labelstudio/", "PromptLayer": "https://python.langchain.com/v0.2/docs/integrations/callbacks/promptlayer/", "Trubrics": "https://python.langchain.com/v0.2/docs/integrations/callbacks/trubrics/", "Log10": "https://python.langchain.com/v0.2/docs/integrations/providers/log10/", "MLflow Deployments for LLMs": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow/", "MLflow AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_ai_gateway/", "-> content='Hello! 
How can I assist you today?'": "https://python.langchain.com/v0.2/docs/integrations/providers/databricks/", "Flyte": "https://python.langchain.com/v0.2/docs/integrations/providers/flyte/", "PremAI": "https://python.langchain.com/v0.2/docs/integrations/providers/premai/", "Arthur": "https://python.langchain.com/v0.2/docs/integrations/providers/arthur_tracking/", "Javelin AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/javelin_ai_gateway/", "Browserbase": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/browserbase/", "Google Cloud Vertex AI": "https://python.langchain.com/v0.2/docs/integrations/llms/google_vertex_ai_palm/", "Databricks": "https://python.langchain.com/v0.2/docs/integrations/llms/databricks/", "Javelin AI Gateway Tutorial": "https://python.langchain.com/v0.2/docs/integrations/llms/javelin/", "Azure ML": "https://python.langchain.com/v0.2/docs/integrations/llms/azure_ml/", "Chat Bot Feedback Template": "https://python.langchain.com/v0.2/docs/templates/chat-bot-feedback/", "Build a Chatbot": "https://python.langchain.com/v0.2/docs/tutorials/chatbot/", "Conversational RAG": "https://python.langchain.com/v0.2/docs/tutorials/qa_chat_history/", "Build a Question/Answering system over SQL data": "https://python.langchain.com/v0.2/docs/tutorials/sql_qa/", "Build a Simple LLM Application": "https://python.langchain.com/v0.2/docs/tutorials/llm_chain/"}, "RunnableMap": {"\ud83e\udd9c\ufe0f\ud83c\udfd3 LangServe": "https://python.langchain.com/v0.2/docs/langserve/"}, "RunnableLambda": {"\ud83e\udd9c\ufe0f\ud83c\udfd3 LangServe": "https://python.langchain.com/v0.2/docs/langserve/", "How to handle long text when doing extraction": "https://python.langchain.com/v0.2/docs/how_to/extraction_long_text/", "How to route between sub-chains": "https://python.langchain.com/v0.2/docs/how_to/routing/", "How to stream runnables": "https://python.langchain.com/v0.2/docs/how_to/streaming/", "How to retry when a parsing error occurs": "https://python.langchain.com/v0.2/docs/how_to/output_parser_retry/", "How to run custom functions": "https://python.langchain.com/v0.2/docs/how_to/functions/", "LangChain Expression Language Cheatsheet": "https://python.langchain.com/v0.2/docs/how_to/lcel_cheatsheet/", "Vectara": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_summary/", "Vector stores and retrievers": "https://python.langchain.com/v0.2/docs/tutorials/retrievers/"}, "PromptTemplate": {"Conceptual guide": "https://python.langchain.com/v0.2/docs/concepts/", "How to better prompt when doing SQL question-answering": "https://python.langchain.com/v0.2/docs/how_to/sql_prompting/", "How to use output parsers to parse an LLM response into structured format": "https://python.langchain.com/v0.2/docs/how_to/output_parser_structured/", "How to route between sub-chains": "https://python.langchain.com/v0.2/docs/how_to/routing/", "How to select examples by n-gram overlap": "https://python.langchain.com/v0.2/docs/how_to/example_selectors_ngram/", "How to select examples by length": "https://python.langchain.com/v0.2/docs/how_to/example_selectors_length_based/", "How to use example selectors": "https://python.langchain.com/v0.2/docs/how_to/example_selectors/", "How to use few shot examples": "https://python.langchain.com/v0.2/docs/how_to/few_shot_examples/", "How to select examples by similarity": "https://python.langchain.com/v0.2/docs/how_to/example_selectors_similarity/", "How to parse XML output": 
"https://python.langchain.com/v0.2/docs/how_to/output_parser_xml/", "How to reorder retrieved results to mitigate the \"lost in the middle\" effect": "https://python.langchain.com/v0.2/docs/how_to/long_context_reorder/", "How to add fallbacks to a runnable": "https://python.langchain.com/v0.2/docs/how_to/fallbacks/", "Run LLMs locally": "https://python.langchain.com/v0.2/docs/how_to/local_llms/", "How to configure runtime chain internals": "https://python.langchain.com/v0.2/docs/how_to/configure/", "How to retry when a parsing error occurs": "https://python.langchain.com/v0.2/docs/how_to/output_parser_retry/", "How to use the MultiQueryRetriever": "https://python.langchain.com/v0.2/docs/how_to/MultiQueryRetriever/", "How to best prompt for Graph-RAG": "https://python.langchain.com/v0.2/docs/how_to/graph_prompting/", "How to parse YAML output": "https://python.langchain.com/v0.2/docs/how_to/output_parser_yaml/", "How to compose prompts together": "https://python.langchain.com/v0.2/docs/how_to/prompts_composition/", "How to partially format prompt templates": "https://python.langchain.com/v0.2/docs/how_to/prompts_partial/", "How to parse JSON output": "https://python.langchain.com/v0.2/docs/how_to/output_parser_json/", "How to select examples by maximal marginal relevance (MMR)": "https://python.langchain.com/v0.2/docs/how_to/example_selectors_mmr/", "How to track token usage for LLMs": "https://python.langchain.com/v0.2/docs/how_to/llm_token_usage_tracking/", "Clarifai": "https://python.langchain.com/v0.2/docs/integrations/llms/clarifai/", "RePhraseQuery": "https://python.langchain.com/v0.2/docs/integrations/retrievers/re_phrase/", "Google Drive": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_drive/", "Milvus Hybrid Search": "https://python.langchain.com/v0.2/docs/integrations/retrievers/milvus_hybrid_search/", "Zapier Natural Language Actions": "https://python.langchain.com/v0.2/docs/integrations/tools/zapier/", "NVIDIA Riva: ASR and TTS": "https://python.langchain.com/v0.2/docs/integrations/tools/nvidia_riva/", "Exa Search": "https://python.langchain.com/v0.2/docs/integrations/tools/exa_search/", "Reddit Search ": "https://python.langchain.com/v0.2/docs/integrations/tools/reddit_search/", "Dall-E Image Generator": "https://python.langchain.com/v0.2/docs/integrations/tools/dalle_image_generator/", "Mot\u00f6rhead": "https://python.langchain.com/v0.2/docs/integrations/memory/motorhead_memory/", "OllamaFunctions": "https://python.langchain.com/v0.2/docs/integrations/chat/ollama_functions/", "Context": "https://python.langchain.com/v0.2/docs/integrations/callbacks/context/", "SageMaker Tracking": "https://python.langchain.com/v0.2/docs/integrations/callbacks/sagemaker_tracking/", "Argilla": "https://python.langchain.com/v0.2/docs/integrations/callbacks/argilla/", "DSPy": "https://python.langchain.com/v0.2/docs/integrations/providers/dspy/", "Comet": "https://python.langchain.com/v0.2/docs/integrations/providers/comet_tracking/", "Aim": "https://python.langchain.com/v0.2/docs/integrations/providers/aim_tracking/", "Weights & Biases": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracking/", "MLflow AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_ai_gateway/", "Rebuff": "https://python.langchain.com/v0.2/docs/integrations/providers/rebuff/", "Prediction Guard": "https://python.langchain.com/v0.2/docs/integrations/llms/predictionguard/", "MLflow": 
"https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_tracking/", "Shale Protocol": "https://python.langchain.com/v0.2/docs/integrations/providers/shaleprotocol/", "Flyte": "https://python.langchain.com/v0.2/docs/integrations/providers/flyte/", "Ray Serve": "https://python.langchain.com/v0.2/docs/integrations/providers/ray_serve/", "Javelin AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/javelin_ai_gateway/", "SAP HANA Cloud Vector Engine": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sap_hanavector/", "Amazon Document DB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/documentdb/", "MongoDB Atlas": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/mongodb_atlas/", "Google Cloud Vertex AI Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/google_cloud_vertexai_rerank/", "AirbyteLoader": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte/", "Memgraph": "https://python.langchain.com/v0.2/docs/integrations/graphs/memgraph/", "Apache AGE": "https://python.langchain.com/v0.2/docs/integrations/graphs/apache_age/", "Neo4j": "https://python.langchain.com/v0.2/docs/integrations/graphs/neo4j_cypher/", "Baseten": "https://python.langchain.com/v0.2/docs/integrations/llms/baseten/", "StochasticAI": "https://python.langchain.com/v0.2/docs/integrations/llms/stochasticai/", "Solar": "https://python.langchain.com/v0.2/docs/integrations/llms/solar/", "Bittensor": "https://python.langchain.com/v0.2/docs/integrations/llms/bittensor/", "IPEX-LLM": "https://python.langchain.com/v0.2/docs/integrations/llms/ipex_llm/", "Banana": "https://python.langchain.com/v0.2/docs/integrations/llms/banana/", "Alibaba Cloud PAI EAS": "https://python.langchain.com/v0.2/docs/integrations/llms/alibabacloud_pai_eas_endpoint/", "OpenLLM": "https://python.langchain.com/v0.2/docs/integrations/llms/openllm/", "SageMakerEndpoint": "https://python.langchain.com/v0.2/docs/integrations/llms/sagemaker/", "Fireworks": "https://python.langchain.com/v0.2/docs/integrations/llms/fireworks/", "OctoAI": "https://python.langchain.com/v0.2/docs/integrations/llms/octoai/", "Writer": "https://python.langchain.com/v0.2/docs/integrations/llms/writer/", "Modal": "https://python.langchain.com/v0.2/docs/integrations/llms/modal/", "TextGen": "https://python.langchain.com/v0.2/docs/integrations/llms/textgen/", "Xorbits Inference (Xinference)": "https://python.langchain.com/v0.2/docs/integrations/llms/xinference/", "Nebula (Symbl.ai)": "https://python.langchain.com/v0.2/docs/integrations/llms/symblai_nebula/", "DeepInfra": "https://python.langchain.com/v0.2/docs/integrations/llms/deepinfra/", "AnthropicLLM": "https://python.langchain.com/v0.2/docs/integrations/llms/anthropic/", "NLP Cloud": "https://python.langchain.com/v0.2/docs/integrations/llms/nlpcloud/", "GPT4All": "https://python.langchain.com/v0.2/docs/integrations/llms/gpt4all/", "ForefrontAI": "https://python.langchain.com/v0.2/docs/integrations/llms/forefrontai/", "MosaicML": "https://python.langchain.com/v0.2/docs/integrations/llms/mosaicml/", "Volc Engine Maas": "https://python.langchain.com/v0.2/docs/integrations/llms/volcengine_maas/", "CerebriumAI": "https://python.langchain.com/v0.2/docs/integrations/llms/cerebriumai/", "OpenAI": "https://python.langchain.com/v0.2/docs/integrations/llms/openai/", "Google Cloud Vertex AI": "https://python.langchain.com/v0.2/docs/integrations/llms/google_vertex_ai_palm/", "Predibase": 
"https://python.langchain.com/v0.2/docs/integrations/llms/predibase/", "GigaChat": "https://python.langchain.com/v0.2/docs/integrations/llms/gigachat/", "# Oracle Cloud Infrastructure Generative AI": "https://python.langchain.com/v0.2/docs/integrations/llms/oci_generative_ai/", "Llama.cpp": "https://python.langchain.com/v0.2/docs/integrations/llms/llamacpp/", "Hugging Face Local Pipelines": "https://python.langchain.com/v0.2/docs/integrations/llms/huggingface_pipelines/", "OpaquePrompts": "https://python.langchain.com/v0.2/docs/integrations/llms/opaqueprompts/", "Titan Takeoff": "https://python.langchain.com/v0.2/docs/integrations/llms/titan_takeoff/", "Aphrodite Engine": "https://python.langchain.com/v0.2/docs/integrations/llms/aphrodite/", "AI21LLM": "https://python.langchain.com/v0.2/docs/integrations/llms/ai21/", "Cohere": "https://python.langchain.com/v0.2/docs/integrations/llms/cohere/", "Eden AI": "https://python.langchain.com/v0.2/docs/integrations/llms/edenai/", "Javelin AI Gateway Tutorial": "https://python.langchain.com/v0.2/docs/integrations/llms/javelin/", "IBM watsonx.ai": "https://python.langchain.com/v0.2/docs/integrations/llms/ibm_watsonx/", "C Transformers": "https://python.langchain.com/v0.2/docs/integrations/llms/ctransformers/", "vLLM": "https://python.langchain.com/v0.2/docs/integrations/llms/vllm/", "Azure ML": "https://python.langchain.com/v0.2/docs/integrations/llms/azure_ml/", "Manifest": "https://python.langchain.com/v0.2/docs/integrations/llms/manifest/", "ExLlamaV2": "https://python.langchain.com/v0.2/docs/integrations/llms/exllamav2/", "Minimax": "https://python.langchain.com/v0.2/docs/integrations/llms/minimax/", "Tongyi Qwen": "https://python.langchain.com/v0.2/docs/integrations/llms/tongyi/", "Huggingface Endpoints": "https://python.langchain.com/v0.2/docs/integrations/llms/huggingface_endpoint/", "MLX Local Pipelines": "https://python.langchain.com/v0.2/docs/integrations/llms/mlx_pipelines/", "Runhouse": "https://python.langchain.com/v0.2/docs/integrations/llms/runhouse/", "Anyscale": "https://python.langchain.com/v0.2/docs/integrations/llms/anyscale/", "YandexGPT": "https://python.langchain.com/v0.2/docs/integrations/llms/yandex/", "GooseAI": "https://python.langchain.com/v0.2/docs/integrations/llms/gooseai/", "OpenLM": "https://python.langchain.com/v0.2/docs/integrations/llms/openlm/", "Aleph Alpha": "https://python.langchain.com/v0.2/docs/integrations/llms/aleph_alpha/", "Cloudflare Workers AI": "https://python.langchain.com/v0.2/docs/integrations/llms/cloudflare_workersai/", "CTranslate2": "https://python.langchain.com/v0.2/docs/integrations/llms/ctranslate2/", "Google AI": "https://python.langchain.com/v0.2/docs/integrations/llms/google_ai/", "PipelineAI": "https://python.langchain.com/v0.2/docs/integrations/llms/pipelineai/", "ChatGLM": "https://python.langchain.com/v0.2/docs/integrations/llms/chatglm/", "Gradient": "https://python.langchain.com/v0.2/docs/integrations/llms/gradient/", "Petals": "https://python.langchain.com/v0.2/docs/integrations/llms/petals/", "OpenVINO": "https://python.langchain.com/v0.2/docs/integrations/llms/openvino/", "Intel Weight-Only Quantization": "https://python.langchain.com/v0.2/docs/integrations/llms/weight_only_quantization/", "Replicate": "https://python.langchain.com/v0.2/docs/integrations/llms/replicate/", "Build a Retrieval Augmented Generation (RAG) App": "https://python.langchain.com/v0.2/docs/tutorials/rag/", "Build a Local RAG Application": "https://python.langchain.com/v0.2/docs/tutorials/local_rag/", 
"Summarize Text": "https://python.langchain.com/v0.2/docs/tutorials/summarization/", "Build a Question/Answering system over SQL data": "https://python.langchain.com/v0.2/docs/tutorials/sql_qa/", "Generate Synthetic Data": "https://python.langchain.com/v0.2/docs/tutorials/data_generation/"}, "MessagesPlaceholder": {"Conceptual guide": "https://python.langchain.com/v0.2/docs/concepts/", "How to add a semantic layer over graph database": "https://python.langchain.com/v0.2/docs/how_to/graph_semantic/", "How to handle long text when doing extraction": "https://python.langchain.com/v0.2/docs/how_to/extraction_long_text/", "How to use reference examples when doing extraction": "https://python.langchain.com/v0.2/docs/how_to/extraction_examples/", "How to do question answering over CSVs": "https://python.langchain.com/v0.2/docs/how_to/sql_csv/", "How to stream results from your RAG application": "https://python.langchain.com/v0.2/docs/how_to/qa_streaming/", "How to handle tool errors": "https://python.langchain.com/v0.2/docs/how_to/tools_error/", "How to add tools to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_tools/", "How to add chat history": "https://python.langchain.com/v0.2/docs/how_to/qa_chat_history_how_to/", "How to add message history": "https://python.langchain.com/v0.2/docs/how_to/message_history/", "How to add retrieval to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_retrieval/", "How to get a RAG application to add citations": "https://python.langchain.com/v0.2/docs/how_to/qa_citations/", "How to add memory to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_memory/", "How to add examples to the prompt for query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_few_shot/", "Redis": "https://python.langchain.com/v0.2/docs/integrations/memory/redis_chat_message_history/", "Google SQL for MySQL": "https://python.langchain.com/v0.2/docs/integrations/memory/google_sql_mysql/", "Google AlloyDB for PostgreSQL": "https://python.langchain.com/v0.2/docs/integrations/memory/google_alloydb/", "ZepCloudChatMessageHistory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_cloud_chat_message_history/", "AWS DynamoDB": "https://python.langchain.com/v0.2/docs/integrations/memory/aws_dynamodb/", "MongoDB": "https://python.langchain.com/v0.2/docs/integrations/memory/mongodb_chat_message_history/", "SQL (SQLAlchemy)": "https://python.langchain.com/v0.2/docs/integrations/memory/sql_chat_message_history/", "Streamlit": "https://python.langchain.com/v0.2/docs/integrations/memory/streamlit_chat_message_history/", "Google El Carro Oracle": "https://python.langchain.com/v0.2/docs/integrations/memory/google_el_carro/", "SQLite": "https://python.langchain.com/v0.2/docs/integrations/memory/sqlite/", "Google SQL for PostgreSQL": "https://python.langchain.com/v0.2/docs/integrations/memory/google_sql_pg/", "Google SQL for SQL Server": "https://python.langchain.com/v0.2/docs/integrations/memory/google_sql_mssql/", "TiDB": "https://python.langchain.com/v0.2/docs/integrations/memory/tidb_chat_message_history/", "Llama2Chat": "https://python.langchain.com/v0.2/docs/integrations/chat/llama2_chat/", "SQL Database": "https://python.langchain.com/v0.2/docs/integrations/toolkits/sql_database/", "Github": "https://python.langchain.com/v0.2/docs/integrations/toolkits/github/", "Build an Extraction Chain": "https://python.langchain.com/v0.2/docs/tutorials/extraction/", "Build a Chatbot": 
"https://python.langchain.com/v0.2/docs/tutorials/chatbot/", "Conversational RAG": "https://python.langchain.com/v0.2/docs/tutorials/qa_chat_history/"}, "CSVLoader": {"Conceptual guide": "https://python.langchain.com/v0.2/docs/concepts/", "How to load CSVs": "https://python.langchain.com/v0.2/docs/how_to/document_loader_csv/", "ChatGPT plugin": "https://python.langchain.com/v0.2/docs/integrations/retrievers/chatgpt-plugin/", "Aerospike": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/aerospike/", "CSV": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/csv/", "Pebblo Safe DocumentLoader": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/pebblo/"}, "BaseChatModel": {"Contribute Integrations": "https://python.langchain.com/v0.2/docs/contributing/integrations/", "How to create a custom chat model class": "https://python.langchain.com/v0.2/docs/how_to/custom_chat_model/"}, "deprecated": {"Contribute Integrations": "https://python.langchain.com/v0.2/docs/contributing/integrations/"}, "UnstructuredMarkdownLoader": {"langchain": "https://python.langchain.com/v0.2/docs/changes/changelog/langchain/", "How to load Markdown": "https://python.langchain.com/v0.2/docs/how_to/document_loader_markdown/", "Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "Apache Doris": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/apache_doris/", "StarRocks": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/starrocks/"}, "TavilySearchResults": {"Build an Agent": "https://python.langchain.com/v0.2/docs/tutorials/agents/", "How to add tools to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_tools/", "How to debug your LLM apps": "https://python.langchain.com/v0.2/docs/how_to/debugging/", "Tavily Search": "https://python.langchain.com/v0.2/docs/integrations/tools/tavily_search/", "ZHIPU AI": "https://python.langchain.com/v0.2/docs/integrations/chat/zhipuai/", "Cohere": "https://python.langchain.com/v0.2/docs/integrations/providers/cohere/"}, "WebBaseLoader": {"Build an Agent": "https://python.langchain.com/v0.2/docs/how_to/agent_executor/", "How to stream results from your RAG application": "https://python.langchain.com/v0.2/docs/how_to/qa_streaming/", "How to get your RAG application to return sources": "https://python.langchain.com/v0.2/docs/how_to/qa_sources/", "How to add chat history": "https://python.langchain.com/v0.2/docs/how_to/qa_chat_history_how_to/", "How to add retrieval to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_retrieval/", "How to use the MultiQueryRetriever": "https://python.langchain.com/v0.2/docs/how_to/MultiQueryRetriever/", "RePhraseQuery": "https://python.langchain.com/v0.2/docs/integrations/retrievers/re_phrase/", "Infino": "https://python.langchain.com/v0.2/docs/integrations/callbacks/infino/", "Zep Cloud": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/zep_cloud/", "Zep": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/zep/", "Google Cloud Vertex AI Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/google_cloud_vertexai_rerank/", "Merge Documents Loader": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/merge_doc/", "WebBaseLoader": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/web_base/", "Build a Retrieval Augmented Generation (RAG) App": "https://python.langchain.com/v0.2/docs/tutorials/rag/", 
"Build a Local RAG Application": "https://python.langchain.com/v0.2/docs/tutorials/local_rag/", "Summarize Text": "https://python.langchain.com/v0.2/docs/tutorials/summarization/", "Conversational RAG": "https://python.langchain.com/v0.2/docs/tutorials/qa_chat_history/"}, "FAISS": {"Build an Agent": "https://python.langchain.com/v0.2/docs/how_to/agent_executor/", "How to handle long text when doing extraction": "https://python.langchain.com/v0.2/docs/how_to/extraction_long_text/", "How to load PDFs": "https://python.langchain.com/v0.2/docs/how_to/document_loader_pdf/", "How to better prompt when doing SQL question-answering": "https://python.langchain.com/v0.2/docs/how_to/sql_prompting/", "How to add values to a chain's state": "https://python.langchain.com/v0.2/docs/how_to/assign/", "How to inspect runnables": "https://python.langchain.com/v0.2/docs/how_to/inspect/", "How to deal with large databases when doing SQL question-answering": "https://python.langchain.com/v0.2/docs/how_to/sql_large_db/", "How to stream runnables": "https://python.langchain.com/v0.2/docs/how_to/streaming/", "How to invoke runnables in parallel": "https://python.langchain.com/v0.2/docs/how_to/parallel/", "How to pass through arguments from one step to the next": "https://python.langchain.com/v0.2/docs/how_to/passthrough/", "How to do retrieval with contextual compression": "https://python.langchain.com/v0.2/docs/how_to/contextual_compression/", "How to use a time-weighted vector store retriever": "https://python.langchain.com/v0.2/docs/how_to/time_weighted_vectorstore/", "How to create and query vector stores": "https://python.langchain.com/v0.2/docs/how_to/vectorstores/", "How to use a vectorstore as a retriever": "https://python.langchain.com/v0.2/docs/how_to/vectorstore_retriever/", "Caching": "https://python.langchain.com/v0.2/docs/how_to/caching_embeddings/", "How to combine results from multiple retrievers": "https://python.langchain.com/v0.2/docs/how_to/ensemble_retriever/", "How to select examples by maximal marginal relevance (MMR)": "https://python.langchain.com/v0.2/docs/how_to/example_selectors_mmr/", "NVIDIA NIMs ": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/nvidia_ai_endpoints/", "Fleet AI Context": "https://python.langchain.com/v0.2/docs/integrations/retrievers/fleet_context/", "LLMLingua Document Compressor": "https://python.langchain.com/v0.2/docs/integrations/retrievers/llmlingua/", "Cohere reranker": "https://python.langchain.com/v0.2/docs/integrations/retrievers/cohere-reranker/", "FlashRank reranker": "https://python.langchain.com/v0.2/docs/integrations/retrievers/flashrank-reranker/", "UpTrain": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "RAGatouille": "https://python.langchain.com/v0.2/docs/integrations/providers/ragatouille/", "Facebook - Meta": "https://python.langchain.com/v0.2/docs/integrations/providers/facebook/", "Document Comparison": "https://python.langchain.com/v0.2/docs/integrations/toolkits/document_comparison_toolkit/", "Faiss (Async)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/faiss_async/", "Faiss": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/faiss/", "VoyageAI Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/voyageai-reranker/", "RankLLM Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/rankllm-reranker/", "OpenVINO Reranker": 
"https://python.langchain.com/v0.2/docs/integrations/document_transformers/openvino_rerank/", "Cross Encoder Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/cross_encoder_reranker/", "Jina Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/jina_rerank/", "YouTube audio": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/youtube_audio/", "# Oracle Cloud Infrastructure Generative AI": "https://python.langchain.com/v0.2/docs/integrations/llms/oci_generative_ai/", "Build a Question/Answering system over SQL data": "https://python.langchain.com/v0.2/docs/tutorials/sql_qa/"}, "OpenAIEmbeddings": {"Build an Agent": "https://python.langchain.com/v0.2/docs/how_to/agent_executor/", "How to handle long text when doing extraction": "https://python.langchain.com/v0.2/docs/how_to/extraction_long_text/", "How to load PDFs": "https://python.langchain.com/v0.2/docs/how_to/document_loader_pdf/", "How to better prompt when doing SQL question-answering": "https://python.langchain.com/v0.2/docs/how_to/sql_prompting/", "How to add values to a chain's state": "https://python.langchain.com/v0.2/docs/how_to/assign/", "How to route between sub-chains": "https://python.langchain.com/v0.2/docs/how_to/routing/", "How to do per-user retrieval": "https://python.langchain.com/v0.2/docs/how_to/qa_per_user/", "How to use few shot examples": "https://python.langchain.com/v0.2/docs/how_to/few_shot_examples/", "How to inspect runnables": "https://python.langchain.com/v0.2/docs/how_to/inspect/", "How to handle cases where no queries are generated": "https://python.langchain.com/v0.2/docs/how_to/query_no_queries/", "How to use few shot examples in chat models": "https://python.langchain.com/v0.2/docs/how_to/few_shot_examples_chat/", "How to select examples by similarity": "https://python.langchain.com/v0.2/docs/how_to/example_selectors_similarity/", "Text embedding models": "https://python.langchain.com/v0.2/docs/how_to/embed_text/", "How to deal with large databases when doing SQL question-answering": "https://python.langchain.com/v0.2/docs/how_to/sql_large_db/", "How to handle multiple queries when doing query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_multiple_queries/", "How to stream results from your RAG application": "https://python.langchain.com/v0.2/docs/how_to/qa_streaming/", "How to get your RAG application to return sources": "https://python.langchain.com/v0.2/docs/how_to/qa_sources/", "How to use the LangChain indexing API": "https://python.langchain.com/v0.2/docs/how_to/indexing/", "How to split text based on semantic similarity": "https://python.langchain.com/v0.2/docs/how_to/semantic-chunker/", "How to stream runnables": "https://python.langchain.com/v0.2/docs/how_to/streaming/", "How to invoke runnables in parallel": "https://python.langchain.com/v0.2/docs/how_to/parallel/", "How to pass through arguments from one step to the next": "https://python.langchain.com/v0.2/docs/how_to/passthrough/", "How to retrieve using multiple vectors per document": "https://python.langchain.com/v0.2/docs/how_to/multi_vector/", "How to add chat history": "https://python.langchain.com/v0.2/docs/how_to/qa_chat_history_how_to/", "How to add retrieval to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_retrieval/", "How to do retrieval with contextual compression": "https://python.langchain.com/v0.2/docs/how_to/contextual_compression/", "How to handle multiple retrievers when doing query analysis": 
"https://python.langchain.com/v0.2/docs/how_to/query_multiple_retrievers/", "How to use a time-weighted vector store retriever": "https://python.langchain.com/v0.2/docs/how_to/time_weighted_vectorstore/", "How to create and query vector stores": "https://python.langchain.com/v0.2/docs/how_to/vectorstores/", "How to get a RAG application to add citations": "https://python.langchain.com/v0.2/docs/how_to/qa_citations/", "How to use the MultiQueryRetriever": "https://python.langchain.com/v0.2/docs/how_to/MultiQueryRetriever/", "How to best prompt for Graph-RAG": "https://python.langchain.com/v0.2/docs/how_to/graph_prompting/", "How to use the Parent Document Retriever": "https://python.langchain.com/v0.2/docs/how_to/parent_document_retriever/", "How deal with high cardinality categoricals when doing query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_high_cardinality/", "How to use a vectorstore as a retriever": "https://python.langchain.com/v0.2/docs/how_to/vectorstore_retriever/", "Caching": "https://python.langchain.com/v0.2/docs/how_to/caching_embeddings/", "How to combine results from multiple retrievers": "https://python.langchain.com/v0.2/docs/how_to/ensemble_retriever/", "How to select examples by maximal marginal relevance (MMR)": "https://python.langchain.com/v0.2/docs/how_to/example_selectors_mmr/", "How to do \"self-querying\" retrieval": "https://python.langchain.com/v0.2/docs/how_to/self_query/", "Hybrid Search": "https://python.langchain.com/v0.2/docs/how_to/hybrid/", "How to add scores to retriever results": "https://python.langchain.com/v0.2/docs/how_to/add_scores_retriever/", "Model caches": "https://python.langchain.com/v0.2/docs/integrations/llm_caching/", "OpenAI": "https://python.langchain.com/v0.2/docs/integrations/platforms/openai/", "Azure OpenAI": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/azureopenai/", "Azure AI Search": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azuresearch/", "RePhraseQuery": "https://python.langchain.com/v0.2/docs/integrations/retrievers/re_phrase/", "Kinetica Vectorstore based Retriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/kinetica/", "JaguarDB Vector Database": "https://python.langchain.com/v0.2/docs/integrations/retrievers/jaguar/", "Fleet AI Context": "https://python.langchain.com/v0.2/docs/integrations/retrievers/fleet_context/", "LLMLingua Document Compressor": "https://python.langchain.com/v0.2/docs/integrations/retrievers/llmlingua/", "SingleStoreDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/singlestoredb/", "kNN": "https://python.langchain.com/v0.2/docs/integrations/retrievers/knn/", "DocArray": "https://python.langchain.com/v0.2/docs/integrations/retrievers/docarray_retriever/", "SVM": "https://python.langchain.com/v0.2/docs/integrations/retrievers/svm/", "Pinecone Hybrid Search": "https://python.langchain.com/v0.2/docs/integrations/retrievers/pinecone_hybrid_search/", "Activeloop Deep Memory": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "Milvus Hybrid Search": "https://python.langchain.com/v0.2/docs/integrations/retrievers/milvus_hybrid_search/", "FlashRank reranker": "https://python.langchain.com/v0.2/docs/integrations/retrievers/flashrank-reranker/", "LOTR (Merger Retriever)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/merger_retriever/", "Milvus": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/milvus/", "PGVector (Postgres)": 
"https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/pgvector_self_query/", "Weaviate": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/weaviate/", "Databricks Vector Search": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/databricks_vector_search/", "DingoDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/dingo/", "OpenSearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/opensearch/", "Elasticsearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/elasticsearch/", "Chroma": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/chroma/", "Timescale Vector (Postgres) ": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/timescalevector_self_query/", "Astra DB (Cassandra)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/astradb/", "Pinecone": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pinecone/", "Supabase (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/supabase/", "Redis": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/redis/", "MyScale": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/myscale/", "Deep Lake": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/activeloop_deeplake_self_query/", "MongoDB Atlas": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/mongodb_atlas/", "Qdrant": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/qdrant/", "Xata": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/xata/", "Confident": "https://python.langchain.com/v0.2/docs/integrations/callbacks/confident/", "UpTrain": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "RAGatouille": "https://python.langchain.com/v0.2/docs/integrations/providers/ragatouille/", "Upstash Vector": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/upstash/", "Javelin AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/javelin_ai_gateway/", "Astra DB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/astradb/", "Cassandra": "https://python.langchain.com/v0.2/docs/integrations/stores/cassandra/", "Document Comparison": "https://python.langchain.com/v0.2/docs/integrations/toolkits/document_comparison_toolkit/", "LanceDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/lancedb/", "Apache Doris": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/apache_doris/", "Kinetica Vectorstore API": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/kinetica/", "Yellowbrick": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/yellowbrick/", "Jaguar Vector Database": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/jaguar/", "SAP HANA Cloud Vector Engine": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sap_hanavector/", "Hippo": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/hippo/", "Rockset": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/rockset/", "Zilliz": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/zilliz/", "Azure Cosmos DB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azure_cosmos_db/", "viking DB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vikingdb/", "Couchbase ": 
"https://python.langchain.com/v0.2/docs/integrations/vectorstores/couchbase/", "Typesense": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/typesense/", "Momento Vector Index (MVI)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/momento_vector_index/", "TiDB Vector": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tidb_vector/", "Activeloop Deep Lake": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/activeloop_deeplake/", "Neo4j Vector Index": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/neo4jvector/", "Lantern": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/lantern/", "DuckDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/duckdb/", "Alibaba Cloud OpenSearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/alibabacloud_opensearch/", "StarRocks": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/starrocks/", "scikit-learn": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sklearn/", "Tencent Cloud VectorDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tencentvectordb/", "DocArray HnswSearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/docarray_hnsw/", "ClickHouse": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/clickhouse/", "Tigris": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tigris/", "China Mobile ECloud ElasticSearch VectorSearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/ecloud_vector_search/", "Faiss (Async)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/faiss_async/", "Apache Cassandra": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/cassandra/", "USearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/usearch/", "KDB.AI": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/kdbai/", "DocArray InMemorySearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/docarray_in_memory/", "Postgres Embedding": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pgembedding/", "Timescale Vector (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/timescalevector/", "Faiss": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/faiss/", "Epsilla": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/epsilla/", "Amazon Document DB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/documentdb/", "AnalyticDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/analyticdb/", "Hologres": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/hologres/", "Meilisearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/meilisearch/", "RankLLM Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/rankllm-reranker/", "YouTube audio": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/youtube_audio/", "Psychic": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/psychic/", "Docugami": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/docugami/", "Build a Retrieval Augmented Generation (RAG) App": "https://python.langchain.com/v0.2/docs/tutorials/rag/", "Conversational RAG": "https://python.langchain.com/v0.2/docs/tutorials/qa_chat_history/", "Build a Query Analysis System": 
"https://python.langchain.com/v0.2/docs/tutorials/query_analysis/", "Build a Question/Answering system over SQL data": "https://python.langchain.com/v0.2/docs/tutorials/sql_qa/", "Vector stores and retrievers": "https://python.langchain.com/v0.2/docs/tutorials/retrievers/"}, "RecursiveCharacterTextSplitter": {"Build an Agent": "https://python.langchain.com/v0.2/docs/how_to/agent_executor/", "How to split code": "https://python.langchain.com/v0.2/docs/how_to/code_splitter/", "How to handle cases where no queries are generated": "https://python.langchain.com/v0.2/docs/how_to/query_no_queries/", "How to recursively split text by characters": "https://python.langchain.com/v0.2/docs/how_to/recursive_text_splitter/", "How to handle multiple queries when doing query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_multiple_queries/", "How to stream results from your RAG application": "https://python.langchain.com/v0.2/docs/how_to/qa_streaming/", "How to get your RAG application to return sources": "https://python.langchain.com/v0.2/docs/how_to/qa_sources/", "How to retrieve using multiple vectors per document": "https://python.langchain.com/v0.2/docs/how_to/multi_vector/", "How to add chat history": "https://python.langchain.com/v0.2/docs/how_to/qa_chat_history_how_to/", "How to add retrieval to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_retrieval/", "How to handle multiple retrievers when doing query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_multiple_retrievers/", "How to split text by tokens ": "https://python.langchain.com/v0.2/docs/how_to/split_by_token/", "How to get a RAG application to add citations": "https://python.langchain.com/v0.2/docs/how_to/qa_citations/", "How to use the MultiQueryRetriever": "https://python.langchain.com/v0.2/docs/how_to/MultiQueryRetriever/", "How to use the Parent Document Retriever": "https://python.langchain.com/v0.2/docs/how_to/parent_document_retriever/", "How to split Markdown by Headers": "https://python.langchain.com/v0.2/docs/how_to/markdown_header_metadata_splitter/", "How to split by HTML header ": "https://python.langchain.com/v0.2/docs/how_to/HTML_header_metadata_splitter/", "How to split by HTML sections": "https://python.langchain.com/v0.2/docs/how_to/HTML_section_aware_splitter/", "How to add scores to retriever results": "https://python.langchain.com/v0.2/docs/how_to/add_scores_retriever/", "RePhraseQuery": "https://python.langchain.com/v0.2/docs/integrations/retrievers/re_phrase/", "LLMLingua Document Compressor": "https://python.langchain.com/v0.2/docs/integrations/retrievers/llmlingua/", "Cohere reranker": "https://python.langchain.com/v0.2/docs/integrations/retrievers/cohere-reranker/", "Activeloop Deep Memory": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "FlashRank reranker": "https://python.langchain.com/v0.2/docs/integrations/retrievers/flashrank-reranker/", "Maritalk": "https://python.langchain.com/v0.2/docs/integrations/chat/maritalk/", "UpTrain": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "RAGatouille": "https://python.langchain.com/v0.2/docs/integrations/providers/ragatouille/", "Yellowbrick": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/yellowbrick/", "Google Vertex AI Vector Search": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_vertex_ai_vector_search/", "viking DB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vikingdb/", "Zep Cloud": 
"https://python.langchain.com/v0.2/docs/integrations/vectorstores/zep_cloud/", "Astra DB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/astradb/", "Apache Cassandra": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/cassandra/", "Zep": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/zep/", "Vearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vearch/", "MongoDB Atlas": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/mongodb_atlas/", "VoyageAI Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/voyageai-reranker/", "RankLLM Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/rankllm-reranker/", "OpenVINO Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/openvino_rerank/", "Google Cloud Vertex AI Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/google_cloud_vertexai_rerank/", "Cross Encoder Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/cross_encoder_reranker/", "Jina Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/jina_rerank/", "YouTube audio": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/youtube_audio/", "Source Code": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/source_code/", "Build a Retrieval Augmented Generation (RAG) App": "https://python.langchain.com/v0.2/docs/tutorials/rag/", "Build a Local RAG Application": "https://python.langchain.com/v0.2/docs/tutorials/local_rag/", "Conversational RAG": "https://python.langchain.com/v0.2/docs/tutorials/qa_chat_history/", "Build a Query Analysis System": "https://python.langchain.com/v0.2/docs/tutorials/query_analysis/"}, "create_retriever_tool": {"Build an Agent": "https://python.langchain.com/v0.2/docs/how_to/agent_executor/", "How to add chat history": "https://python.langchain.com/v0.2/docs/how_to/qa_chat_history_how_to/", "Xata": "https://python.langchain.com/v0.2/docs/integrations/memory/xata_chat_message_history/", "Conversational RAG": "https://python.langchain.com/v0.2/docs/tutorials/qa_chat_history/", "Build a Question/Answering system over SQL data": "https://python.langchain.com/v0.2/docs/tutorials/sql_qa/"}, "create_tool_calling_agent": {"Build an Agent": "https://python.langchain.com/v0.2/docs/how_to/agent_executor/", "How to use tools in a chain": "https://python.langchain.com/v0.2/docs/how_to/tools_chain/", "How to track token usage in ChatModels": "https://python.langchain.com/v0.2/docs/how_to/chat_token_usage_tracking/", "How to debug your LLM apps": "https://python.langchain.com/v0.2/docs/how_to/debugging/", "How to migrate from legacy LangChain agents to LangGraph": "https://python.langchain.com/v0.2/docs/how_to/migrate_agent/", "Azure Container Apps dynamic sessions": "https://python.langchain.com/v0.2/docs/integrations/tools/azure_dynamic_sessions/", "Conversational RAG": "https://python.langchain.com/v0.2/docs/tutorials/qa_chat_history/"}, "AgentExecutor": {"Build an Agent": "https://python.langchain.com/v0.2/docs/how_to/agent_executor/", "How to add a semantic layer over graph database": "https://python.langchain.com/v0.2/docs/how_to/graph_semantic/", "How to use tools in a chain": "https://python.langchain.com/v0.2/docs/how_to/tools_chain/", "How to track token usage in ChatModels": "https://python.langchain.com/v0.2/docs/how_to/chat_token_usage_tracking/", 
"How to add tools to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_tools/", "How to debug your LLM apps": "https://python.langchain.com/v0.2/docs/how_to/debugging/", "How to migrate from legacy LangChain agents to LangGraph": "https://python.langchain.com/v0.2/docs/how_to/migrate_agent/", "Tavily Search": "https://python.langchain.com/v0.2/docs/integrations/tools/tavily_search/", "Infobip": "https://python.langchain.com/v0.2/docs/integrations/tools/infobip/", "AskNews": "https://python.langchain.com/v0.2/docs/integrations/tools/asknews/", "Azure Container Apps dynamic sessions": "https://python.langchain.com/v0.2/docs/integrations/tools/azure_dynamic_sessions/", "Semantic Scholar API Tool": "https://python.langchain.com/v0.2/docs/integrations/tools/semanticscholar/", "Passio NutritionAI": "https://python.langchain.com/v0.2/docs/integrations/tools/passio_nutrition_ai/", "Memorize": "https://python.langchain.com/v0.2/docs/integrations/tools/memorize/", "ArXiv": "https://python.langchain.com/v0.2/docs/integrations/tools/arxiv/", "Exa Search": "https://python.langchain.com/v0.2/docs/integrations/tools/exa_search/", "Reddit Search ": "https://python.langchain.com/v0.2/docs/integrations/tools/reddit_search/", "You.com Search": "https://python.langchain.com/v0.2/docs/integrations/tools/you/", "Ionic Shopping Tool": "https://python.langchain.com/v0.2/docs/integrations/tools/ionic_shopping/", "Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/chat/huggingface/", "MLX": "https://python.langchain.com/v0.2/docs/integrations/chat/mlx/", "ZHIPU AI": "https://python.langchain.com/v0.2/docs/integrations/chat/zhipuai/", "LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/callbacks/llmonitor/", "Streamlit": "https://python.langchain.com/v0.2/docs/integrations/callbacks/streamlit/", "Cohere": "https://python.langchain.com/v0.2/docs/integrations/providers/cohere/", "Dataherald": "https://python.langchain.com/v0.2/docs/integrations/providers/dataherald/", "Log, Trace, and Monitor": "https://python.langchain.com/v0.2/docs/integrations/providers/portkey/logging_tracing_portkey/", "Portkey": "https://python.langchain.com/v0.2/docs/integrations/providers/portkey/index/", "Python": "https://python.langchain.com/v0.2/docs/integrations/toolkits/python/", "SQL Database": "https://python.langchain.com/v0.2/docs/integrations/toolkits/sql_database/", "Gmail": "https://python.langchain.com/v0.2/docs/integrations/toolkits/gmail/", "Slack": "https://python.langchain.com/v0.2/docs/integrations/toolkits/slack/", "Cassandra Database": "https://python.langchain.com/v0.2/docs/integrations/toolkits/cassandra_database/", "Robocorp": "https://python.langchain.com/v0.2/docs/integrations/toolkits/robocorp/", "MultiOn": "https://python.langchain.com/v0.2/docs/integrations/toolkits/multion/", "Amadeus": "https://python.langchain.com/v0.2/docs/integrations/toolkits/amadeus/", "Azure AI Services": "https://python.langchain.com/v0.2/docs/integrations/toolkits/azure_ai_services/", "Polygon IO Toolkit": "https://python.langchain.com/v0.2/docs/integrations/toolkits/polygon/", "Bittensor": "https://python.langchain.com/v0.2/docs/integrations/llms/bittensor/", "Conversational RAG": "https://python.langchain.com/v0.2/docs/tutorials/qa_chat_history/"}, "AIMessage": {"Build an Agent": "https://python.langchain.com/v0.2/docs/how_to/agent_executor/", "How to add a semantic layer over graph database": "https://python.langchain.com/v0.2/docs/how_to/graph_semantic/", "How to do tool/function 
calling": "https://python.langchain.com/v0.2/docs/how_to/function_calling/", "How to add a human-in-the-loop for tools": "https://python.langchain.com/v0.2/docs/how_to/tools_human/", "How to use prompting alone (no tool calling) to do extraction": "https://python.langchain.com/v0.2/docs/how_to/extraction_parse/", "How to use reference examples when doing extraction": "https://python.langchain.com/v0.2/docs/how_to/extraction_examples/", "How to use a model to call tools": "https://python.langchain.com/v0.2/docs/how_to/tool_calling", "How to create a custom chat model class": "https://python.langchain.com/v0.2/docs/how_to/custom_chat_model/", "How to handle tool errors": "https://python.langchain.com/v0.2/docs/how_to/tools_error/", "How to add tools to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_tools/", "How to add chat history": "https://python.langchain.com/v0.2/docs/how_to/qa_chat_history_how_to/", "How to add retrieval to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_retrieval/", "How to create a custom Output Parser": "https://python.langchain.com/v0.2/docs/how_to/output_parser_custom/", "How to add memory to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_memory/", "How to return structured data from a model": "https://python.langchain.com/v0.2/docs/how_to/structured_output/", "How to compose prompts together": "https://python.langchain.com/v0.2/docs/how_to/prompts_composition/", "How to add examples to the prompt for query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_few_shot/", "Twitter (via Apify)": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/twitter/", "Zep Open Source": "https://python.langchain.com/v0.2/docs/integrations/retrievers/zep_memorystore/", "Zep Cloud": "https://python.langchain.com/v0.2/docs/integrations/retrievers/zep_cloud_memorystore/", "Google Imagen": "https://python.langchain.com/v0.2/docs/integrations/tools/google_imagen/", "Zep Open Source Memory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory/", "ZepCloudChatMessageHistory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_cloud_chat_message_history/", "Zep Cloud Memory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory_cloud/", "ChatAnthropic": "https://python.langchain.com/v0.2/docs/integrations/chat/anthropic/", "ZHIPU AI": "https://python.langchain.com/v0.2/docs/integrations/chat/zhipuai/", "Yuan2.0": "https://python.langchain.com/v0.2/docs/integrations/chat/yuan2/", "SQL Database": "https://python.langchain.com/v0.2/docs/integrations/toolkits/sql_database/", "Google Cloud Vertex AI": "https://python.langchain.com/v0.2/docs/integrations/llms/google_vertex_ai_palm/", "ChatGLM": "https://python.langchain.com/v0.2/docs/integrations/llms/chatglm/", "Chat Bot Feedback Template": "https://python.langchain.com/v0.2/docs/templates/chat-bot-feedback/", "Build a Chatbot": "https://python.langchain.com/v0.2/docs/tutorials/chatbot/", "Conversational RAG": "https://python.langchain.com/v0.2/docs/tutorials/qa_chat_history/"}, "ChatMessageHistory": {"Build an Agent": "https://python.langchain.com/v0.2/docs/how_to/agent_executor/", "How to add tools to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_tools/", "How to add chat history": "https://python.langchain.com/v0.2/docs/how_to/qa_chat_history_how_to/", "How to add memory to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_memory/", "How to migrate from legacy 
LangChain agents to LangGraph": "https://python.langchain.com/v0.2/docs/how_to/migrate_agent/", "Build a Chatbot": "https://python.langchain.com/v0.2/docs/tutorials/chatbot/", "Conversational RAG": "https://python.langchain.com/v0.2/docs/tutorials/qa_chat_history/"}, "BaseChatMessageHistory": {"Build an Agent": "https://python.langchain.com/v0.2/docs/how_to/agent_executor/", "How to add chat history": "https://python.langchain.com/v0.2/docs/how_to/qa_chat_history_how_to/", "Build a Chatbot": "https://python.langchain.com/v0.2/docs/tutorials/chatbot/", "Conversational RAG": "https://python.langchain.com/v0.2/docs/tutorials/qa_chat_history/"}, "RunnableWithMessageHistory": {"Build an Agent": "https://python.langchain.com/v0.2/docs/how_to/agent_executor/", "How to add tools to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_tools/", "How to add chat history": "https://python.langchain.com/v0.2/docs/how_to/qa_chat_history_how_to/", "How to add message history": "https://python.langchain.com/v0.2/docs/how_to/message_history/", "How to add memory to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_memory/", "How to migrate from legacy LangChain agents to LangGraph": "https://python.langchain.com/v0.2/docs/how_to/migrate_agent/", "Redis": "https://python.langchain.com/v0.2/docs/integrations/memory/redis_chat_message_history/", "Google SQL for MySQL": "https://python.langchain.com/v0.2/docs/integrations/memory/google_sql_mysql/", "Google AlloyDB for PostgreSQL": "https://python.langchain.com/v0.2/docs/integrations/memory/google_alloydb/", "ZepCloudChatMessageHistory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_cloud_chat_message_history/", "AWS DynamoDB": "https://python.langchain.com/v0.2/docs/integrations/memory/aws_dynamodb/", "MongoDB": "https://python.langchain.com/v0.2/docs/integrations/memory/mongodb_chat_message_history/", "SQL (SQLAlchemy)": "https://python.langchain.com/v0.2/docs/integrations/memory/sql_chat_message_history/", "Streamlit": "https://python.langchain.com/v0.2/docs/integrations/memory/streamlit_chat_message_history/", "Google El Carro Oracle": "https://python.langchain.com/v0.2/docs/integrations/memory/google_el_carro/", "SQLite": "https://python.langchain.com/v0.2/docs/integrations/memory/sqlite/", "Google SQL for PostgreSQL": "https://python.langchain.com/v0.2/docs/integrations/memory/google_sql_pg/", "Google SQL for SQL Server": "https://python.langchain.com/v0.2/docs/integrations/memory/google_sql_mssql/", "TiDB": "https://python.langchain.com/v0.2/docs/integrations/memory/tidb_chat_message_history/", "Build a Chatbot": "https://python.langchain.com/v0.2/docs/tutorials/chatbot/", "Conversational RAG": "https://python.langchain.com/v0.2/docs/tutorials/qa_chat_history/"}, "Neo4jGraph": {"How to add a semantic layer over graph database": "https://python.langchain.com/v0.2/docs/how_to/graph_semantic/", "How to map values to a graph database": "https://python.langchain.com/v0.2/docs/how_to/graph_mapping/", "How to construct knowledge graphs": "https://python.langchain.com/v0.2/docs/how_to/graph_constructing/", "How to best prompt for Graph-RAG": "https://python.langchain.com/v0.2/docs/how_to/graph_prompting/", "Neo4j": "https://python.langchain.com/v0.2/docs/integrations/graphs/neo4j_cypher/", "Diffbot": "https://python.langchain.com/v0.2/docs/integrations/graphs/diffbot/", "Build a Question Answering application over a Graph Database": "https://python.langchain.com/v0.2/docs/tutorials/graph/"}, 
"AsyncCallbackManagerForToolRun": {"How to add a semantic layer over graph database": "https://python.langchain.com/v0.2/docs/how_to/graph_semantic/", "How to create custom tools": "https://python.langchain.com/v0.2/docs/how_to/custom_tools/"}, "CallbackManagerForToolRun": {"How to add a semantic layer over graph database": "https://python.langchain.com/v0.2/docs/how_to/graph_semantic/", "How to create custom tools": "https://python.langchain.com/v0.2/docs/how_to/custom_tools/"}, "BaseTool": {"How to add a semantic layer over graph database": "https://python.langchain.com/v0.2/docs/how_to/graph_semantic/", "How to create custom tools": "https://python.langchain.com/v0.2/docs/how_to/custom_tools/"}, "format_to_openai_function_messages": {"How to add a semantic layer over graph database": "https://python.langchain.com/v0.2/docs/how_to/graph_semantic/"}, "OpenAIFunctionsAgentOutputParser": {"How to add a semantic layer over graph database": "https://python.langchain.com/v0.2/docs/how_to/graph_semantic/"}, "convert_to_openai_function": {"How to add a semantic layer over graph database": "https://python.langchain.com/v0.2/docs/how_to/graph_semantic/", "How to convert tools to OpenAI Functions": "https://python.langchain.com/v0.2/docs/how_to/tools_as_openai_functions/"}, "BSHTMLLoader": {"How to handle long text when doing extraction": "https://python.langchain.com/v0.2/docs/how_to/extraction_long_text/", "How to load HTML": "https://python.langchain.com/v0.2/docs/how_to/document_loader_html/"}, "TokenTextSplitter": {"How to handle long text when doing extraction": "https://python.langchain.com/v0.2/docs/how_to/extraction_long_text/", "How to split text by tokens ": "https://python.langchain.com/v0.2/docs/how_to/split_by_token/", "Azure AI Search": "https://python.langchain.com/v0.2/docs/integrations/retrievers/azure_ai_search/", "Apache Doris": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/apache_doris/", "StarRocks": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/starrocks/"}, "Document": {"How to handle long text when doing extraction": "https://python.langchain.com/v0.2/docs/how_to/extraction_long_text/", "How to load PDFs": "https://python.langchain.com/v0.2/docs/how_to/document_loader_pdf/", "How to create a custom Document Loader": "https://python.langchain.com/v0.2/docs/how_to/document_loader_custom/", "How to use the LangChain indexing API": "https://python.langchain.com/v0.2/docs/how_to/indexing/", "How to retrieve using multiple vectors per document": "https://python.langchain.com/v0.2/docs/how_to/multi_vector/", "How to create a custom Retriever": "https://python.langchain.com/v0.2/docs/how_to/custom_retriever/", "How to construct knowledge graphs": "https://python.langchain.com/v0.2/docs/how_to/graph_constructing/", "How to use a time-weighted vector store retriever": "https://python.langchain.com/v0.2/docs/how_to/time_weighted_vectorstore/", "How to get a RAG application to add citations": "https://python.langchain.com/v0.2/docs/how_to/qa_citations/", "How to load Markdown": "https://python.langchain.com/v0.2/docs/how_to/document_loader_markdown/", "How to do \"self-querying\" retrieval": "https://python.langchain.com/v0.2/docs/how_to/self_query/", "How to add scores to retriever results": "https://python.langchain.com/v0.2/docs/how_to/add_scores_retriever/", "Model caches": "https://python.langchain.com/v0.2/docs/integrations/llm_caching/", "Oracle AI Vector Search: Generate Embeddings": 
"https://python.langchain.com/v0.2/docs/integrations/text_embedding/oracleai/", "Kinetica Vectorstore based Retriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/kinetica/", "Fleet AI Context": "https://python.langchain.com/v0.2/docs/integrations/retrievers/fleet_context/", "ChatGPT plugin": "https://python.langchain.com/v0.2/docs/integrations/retrievers/chatgpt-plugin/", "Cohere RAG": "https://python.langchain.com/v0.2/docs/integrations/retrievers/cohere/", "Weaviate Hybrid Search": "https://python.langchain.com/v0.2/docs/integrations/retrievers/weaviate-hybrid/", "BM25": "https://python.langchain.com/v0.2/docs/integrations/retrievers/bm25/", "Qdrant Sparse Vector": "https://python.langchain.com/v0.2/docs/integrations/retrievers/qdrant-sparse/", "Elasticsearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/elasticsearch/", "TF-IDF": "https://python.langchain.com/v0.2/docs/integrations/retrievers/tf_idf/", "Milvus": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/milvus/", "PGVector (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/pgvector_self_query/", "Weaviate": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/weaviate_self_query/", "Vectara ": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/vectara_self_query/", "DashVector": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/dashvector/", "Databricks Vector Search": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/databricks_vector_search/", "DingoDB": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/dingo/", "OpenSearch": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/opensearch_self_query/", "Chroma": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/chroma_self_query/", "Tencent Cloud VectorDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tencentvectordb/", "Timescale Vector (Postgres) ": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/timescalevector_self_query/", "Astra DB (Cassandra)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/astradb/", "Pinecone": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/pinecone/", "Supabase (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/supabase_self_query/", "Redis": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/redis_self_query/", "MyScale": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/myscale_self_query/", "Deep Lake": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/activeloop_deeplake_self_query/", "MongoDB Atlas": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/mongodb_atlas/", "Qdrant": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/qdrant_self_query/", "Oracle AI Vector Search: Generate Summary": "https://python.langchain.com/v0.2/docs/integrations/tools/oracleai/", "Apify": "https://python.langchain.com/v0.2/docs/integrations/tools/apify/", "Cohere": "https://python.langchain.com/v0.2/docs/integrations/providers/cohere/", "Kinetica Vectorstore API": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/kinetica/", "Yellowbrick": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/yellowbrick/", "SAP HANA 
Cloud Vector Engine": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sap_hanavector/", "PGVector": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pgvector/", "SingleStoreDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/singlestoredb/", "Annoy": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/annoy/", "Oracle AI Vector Search: Vector Store": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/oracle/", "Neo4j Vector Index": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/neo4jvector/", "Lantern": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/lantern/", "Google Firestore (Native Mode)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_firestore/", "Astra DB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/astradb/", "Faiss (Async)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/faiss_async/", "Apache Cassandra": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/cassandra/", "PGVecto.rs": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pgvecto_rs/", "Postgres Embedding": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pgembedding/", "Timescale Vector (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/timescalevector/", "Faiss": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/faiss/", "Nuclia": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/nuclia_transformer/", "AI21SemanticTextSplitter": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/ai21_semantic_text_splitter/", "Google Cloud Vertex AI Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/google_cloud_vertexai_rerank/", "OpenAI metadata tagger": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/openai_metadata_tagger/", "Doctran: extract properties": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/doctran_extract_properties/", "Google Translate": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/google_translate/", "Doctran: interrogate documents": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/doctran_interrogate_document/", "Doctran: language translation": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/doctran_translate_document/", "TensorFlow Datasets": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/tensorflow_datasets/", "Google Cloud SQL for MySQL": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_cloud_sql_mysql/", "Airbyte Salesforce (Deprecated)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_salesforce/", "Airbyte CDK (Deprecated)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_cdk/", "Airbyte Stripe (Deprecated)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_stripe/", "Copy Paste": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/copypaste/", "Airbyte Typeform (Deprecated)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_typeform/", "Apify Dataset": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/apify_dataset/", "Google Firestore in Datastore Mode": 
"https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_datastore/", "Oracle AI Vector Search: Document Processing": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/oracleai/", "Docugami": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/docugami/", "Airbyte Hubspot (Deprecated)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_hubspot/", "Airbyte Gong (Deprecated)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_gong/", "Google Memorystore for Redis": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_memorystore_redis/", "Google Bigtable": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_bigtable/", "Google Cloud SQL for SQL server": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_cloud_sql_mssql/", "Google El Carro for Oracle Workloads": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_el_carro/", "Airbyte Shopify (Deprecated)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_shopify/", "Airbyte Zendesk Support (Deprecated)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_zendesk_support/", "Google Spanner": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_spanner/", "Azure Cosmos DB for Apache Gremlin": "https://python.langchain.com/v0.2/docs/integrations/graphs/azure_cosmosdb_gremlin/", "SageMakerEndpoint": "https://python.langchain.com/v0.2/docs/integrations/llms/sagemaker/", "self-query-qdrant": "https://python.langchain.com/v0.2/docs/templates/self-query-qdrant/", "Build a Query Analysis System": "https://python.langchain.com/v0.2/docs/tutorials/query_analysis/", "Vector stores and retrievers": "https://python.langchain.com/v0.2/docs/tutorials/retrievers/"}, "CharacterTextSplitter": {"How to handle long text when doing extraction": "https://python.langchain.com/v0.2/docs/how_to/extraction_long_text/", "How to split by character": "https://python.langchain.com/v0.2/docs/how_to/character_text_splitter/", "How to use the LangChain indexing API": "https://python.langchain.com/v0.2/docs/how_to/indexing/", "How to do retrieval with contextual compression": "https://python.langchain.com/v0.2/docs/how_to/contextual_compression/", "How to create and query vector stores": "https://python.langchain.com/v0.2/docs/how_to/vectorstores/", "How to split text by tokens ": "https://python.langchain.com/v0.2/docs/how_to/split_by_token/", "How to use a vectorstore as a retriever": "https://python.langchain.com/v0.2/docs/how_to/vectorstore_retriever/", "Caching": "https://python.langchain.com/v0.2/docs/how_to/caching_embeddings/", "Model caches": "https://python.langchain.com/v0.2/docs/integrations/llm_caching/", "Azure AI Search": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azuresearch/", "Kinetica Vectorstore based Retriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/kinetica/", "JaguarDB Vector Database": "https://python.langchain.com/v0.2/docs/integrations/retrievers/jaguar/", "SingleStoreDB": "https://python.langchain.com/v0.2/docs/integrations/retrievers/singlestoredb/", "Vectara ": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/vectara_self_query/", "OpenAI": "https://python.langchain.com/v0.2/docs/integrations/platforms/openai/", "Confident": 
"https://python.langchain.com/v0.2/docs/integrations/callbacks/confident/", "Upstash Vector": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/upstash/", "VDMS": "https://python.langchain.com/v0.2/docs/integrations/providers/vdms/", "Document Comparison": "https://python.langchain.com/v0.2/docs/integrations/toolkits/document_comparison_toolkit/", "LanceDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/lancedb/", "Kinetica Vectorstore API": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/kinetica/", "SQLite-VSS": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sqlitevss/", "Vald": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vald/", "Weaviate": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/weaviate/", "Jaguar Vector Database": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/jaguar/", "SAP HANA Cloud Vector Engine": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sap_hanavector/", "DashVector": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/dashvector/", "Databricks Vector Search": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/databricks_vector_search/", "ScaNN": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/scann/", "Xata": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/xata/", "Hippo": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/hippo/", "Vectara": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vectara/", "Vespa": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vespa/", "Rockset": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/rockset/", "DingoDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/dingo/", "Zilliz": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/zilliz/", "Azure Cosmos DB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azure_cosmos_db/", "Annoy": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/annoy/", "Couchbase ": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/couchbase/", "Typesense": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/typesense/", "Momento Vector Index (MVI)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/momento_vector_index/", "TiDB Vector": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tidb_vector/", "Relyt": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/relyt/", "Activeloop Deep Lake": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/activeloop_deeplake/", "vlite": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vlite/", "Neo4j Vector Index": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/neo4jvector/", "Lantern": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/lantern/", "Tair": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tair/", "Chroma": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/chroma/", "DuckDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/duckdb/", "Alibaba Cloud OpenSearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/alibabacloud_opensearch/", "Clarifai": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/clarifai/", "scikit-learn": 
"https://python.langchain.com/v0.2/docs/integrations/vectorstores/sklearn/", "Tencent Cloud VectorDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tencentvectordb/", "DocArray HnswSearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/docarray_hnsw/", "MyScale": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/myscale/", "TileDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tiledb/", "ClickHouse": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/clickhouse/", "Google Memorystore for Redis": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_memorystore_redis/", "Qdrant": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/qdrant/", "Tigris": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tigris/", "China Mobile ECloud ElasticSearch VectorSearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/ecloud_vector_search/", "Bagel": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/bagel/", "Baidu Cloud ElasticSearch VectorSearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/baiducloud_vector_search/", "AwaDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/awadb/", "Supabase (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/supabase/", "SurrealDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/surrealdb/", "OpenSearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/opensearch/", "Pinecone": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pinecone/", "Faiss (Async)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/faiss_async/", "BagelDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/bageldb/", "ManticoreSearch VectorStore": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/manticore_search/", "USearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/usearch/", "Milvus": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/milvus/", "Elasticsearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/elasticsearch/", "PGVecto.rs": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pgvecto_rs/", "Marqo": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/marqo/", "DocArray InMemorySearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/docarray_in_memory/", "Postgres Embedding": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pgembedding/", "Intel's Visual Data Management System (VDMS)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vdms/", "Timescale Vector (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/timescalevector/", "Faiss": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/faiss/", "Epsilla": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/epsilla/", "Amazon Document DB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/documentdb/", "SemaDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/semadb/", "AnalyticDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/analyticdb/", "Hologres": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/hologres/", "Baidu VectorDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/baiduvectordb/", "Meilisearch": 
"https://python.langchain.com/v0.2/docs/integrations/vectorstores/meilisearch/", "Psychic": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/psychic/", "Manifest": "https://python.langchain.com/v0.2/docs/integrations/llms/manifest/", "Summarize Text": "https://python.langchain.com/v0.2/docs/tutorials/summarization/"}, "PyPDFLoader": {"How to load PDFs": "https://python.langchain.com/v0.2/docs/how_to/document_loader_pdf/", "Document Comparison": "https://python.langchain.com/v0.2/docs/integrations/toolkits/document_comparison_toolkit/", "Google Vertex AI Vector Search": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_vertex_ai_vector_search/", "Astra DB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/astradb/", "Apache Cassandra": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/cassandra/", "KDB.AI": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/kdbai/", "MongoDB Atlas": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/mongodb_atlas/", "Merge Documents Loader": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/merge_doc/", "Google Cloud Storage File": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_cloud_storage_file/"}, "PyMuPDFLoader": {"How to load PDFs": "https://python.langchain.com/v0.2/docs/how_to/document_loader_pdf/"}, "MathpixPDFLoader": {"How to load PDFs": "https://python.langchain.com/v0.2/docs/how_to/document_loader_pdf/"}, "UnstructuredPDFLoader": {"How to load PDFs": "https://python.langchain.com/v0.2/docs/how_to/document_loader_pdf/", "Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/"}, "OnlinePDFLoader": {"How to load PDFs": "https://python.langchain.com/v0.2/docs/how_to/document_loader_pdf/", "Maritalk": "https://python.langchain.com/v0.2/docs/integrations/chat/maritalk/"}, "PyPDFium2Loader": {"How to load PDFs": "https://python.langchain.com/v0.2/docs/how_to/document_loader_pdf/"}, "PDFMinerLoader": {"How to load PDFs": "https://python.langchain.com/v0.2/docs/how_to/document_loader_pdf/"}, "PDFMinerPDFasHTMLLoader": {"How to load PDFs": "https://python.langchain.com/v0.2/docs/how_to/document_loader_pdf/"}, "PyPDFDirectoryLoader": {"How to load PDFs": "https://python.langchain.com/v0.2/docs/how_to/document_loader_pdf/"}, "PDFPlumberLoader": {"How to load PDFs": "https://python.langchain.com/v0.2/docs/how_to/document_loader_pdf/"}, "AmazonTextractPDFLoader": {"How to load PDFs": "https://python.langchain.com/v0.2/docs/how_to/document_loader_pdf/", "AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "Amazon Textract ": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/amazon_textract/"}, "AzureAIDocumentIntelligenceLoader": {"How to load PDFs": "https://python.langchain.com/v0.2/docs/how_to/document_loader_pdf/", "How to load Microsoft Office files": "https://python.langchain.com/v0.2/docs/how_to/document_loader_office_file/", "Microsoft Word": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/microsoft_word/", "Microsoft Excel": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/microsoft_excel/", "Microsoft PowerPoint": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/microsoft_powerpoint/", "Azure AI Document Intelligence": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/azure_document_intelligence/"}, "SQLDatabase": {"How to better 
prompt when doing SQL question-answering": "https://python.langchain.com/v0.2/docs/how_to/sql_prompting/", "How to deal with large databases when doing SQL question-answering": "https://python.langchain.com/v0.2/docs/how_to/sql_large_db/", "How to do question answering over CSVs": "https://python.langchain.com/v0.2/docs/how_to/sql_csv/", "How to do query validation as part of SQL question-answering": "https://python.langchain.com/v0.2/docs/how_to/sql_query_checking/", "SQL Database": "https://python.langchain.com/v0.2/docs/integrations/toolkits/sql_database/", "CnosDB": "https://python.langchain.com/v0.2/docs/integrations/providers/cnosdb/", "Rebuff": "https://python.langchain.com/v0.2/docs/integrations/providers/rebuff/", "Build a Question/Answering system over SQL data": "https://python.langchain.com/v0.2/docs/tutorials/sql_qa/"}, "create_sql_query_chain": {"How to better prompt when doing SQL question-answering": "https://python.langchain.com/v0.2/docs/how_to/sql_prompting/", "How to deal with large databases when doing SQL question-answering": "https://python.langchain.com/v0.2/docs/how_to/sql_large_db/", "How to do query validation as part of SQL question-answering": "https://python.langchain.com/v0.2/docs/how_to/sql_query_checking/", "Build a Question/Answering system over SQL data": "https://python.langchain.com/v0.2/docs/tutorials/sql_qa/"}, "FewShotPromptTemplate": {"How to better prompt when doing SQL question-answering": "https://python.langchain.com/v0.2/docs/how_to/sql_prompting/", "How to select examples by n-gram overlap": "https://python.langchain.com/v0.2/docs/how_to/example_selectors_ngram/", "How to select examples by length": "https://python.langchain.com/v0.2/docs/how_to/example_selectors_length_based/", "How to use example selectors": "https://python.langchain.com/v0.2/docs/how_to/example_selectors/", "How to use few shot examples": "https://python.langchain.com/v0.2/docs/how_to/few_shot_examples/", "How to select examples by similarity": "https://python.langchain.com/v0.2/docs/how_to/example_selectors_similarity/", "How to best prompt for Graph-RAG": "https://python.langchain.com/v0.2/docs/how_to/graph_prompting/", "How to select examples by maximal marginal relevance (MMR)": "https://python.langchain.com/v0.2/docs/how_to/example_selectors_mmr/", "Generate Synthetic Data": "https://python.langchain.com/v0.2/docs/tutorials/data_generation/"}, "SemanticSimilarityExampleSelector": {"How to better prompt when doing SQL question-answering": "https://python.langchain.com/v0.2/docs/how_to/sql_prompting/", "How to use few shot examples": "https://python.langchain.com/v0.2/docs/how_to/few_shot_examples/", "How to use few shot examples in chat models": "https://python.langchain.com/v0.2/docs/how_to/few_shot_examples_chat/", "How to select examples by similarity": "https://python.langchain.com/v0.2/docs/how_to/example_selectors_similarity/", "How to best prompt for Graph-RAG": "https://python.langchain.com/v0.2/docs/how_to/graph_prompting/", "How to select examples by maximal marginal relevance (MMR)": "https://python.langchain.com/v0.2/docs/how_to/example_selectors_mmr/"}, "PydanticOutputParser": {"How to use output parsers to parse an LLM response into structured format": "https://python.langchain.com/v0.2/docs/how_to/output_parser_structured/", "How to use prompting alone (no tool calling) to do extraction": "https://python.langchain.com/v0.2/docs/how_to/extraction_parse/", "How to use the output-fixing parser": 
"https://python.langchain.com/v0.2/docs/how_to/output_parser_fixing/", "How to retry when a parsing error occurs": "https://python.langchain.com/v0.2/docs/how_to/output_parser_retry/", "How to return structured data from a model": "https://python.langchain.com/v0.2/docs/how_to/structured_output/", "Generate Synthetic Data": "https://python.langchain.com/v0.2/docs/tutorials/data_generation/"}, "OpenAI": {"How to use output parsers to parse an LLM response into structured format": "https://python.langchain.com/v0.2/docs/how_to/output_parser_structured/", "How to reorder retrieved results to mitigate the \"lost in the middle\" effect": "https://python.langchain.com/v0.2/docs/how_to/long_context_reorder/", "How to add fallbacks to a runnable": "https://python.langchain.com/v0.2/docs/how_to/fallbacks/", "How to do retrieval with contextual compression": "https://python.langchain.com/v0.2/docs/how_to/contextual_compression/", "How to retry when a parsing error occurs": "https://python.langchain.com/v0.2/docs/how_to/output_parser_retry/", "How to stream responses from an LLM": "https://python.langchain.com/v0.2/docs/how_to/streaming_llm/", "How to cache LLM responses": "https://python.langchain.com/v0.2/docs/how_to/llm_caching/", "How to track token usage for LLMs": "https://python.langchain.com/v0.2/docs/how_to/llm_token_usage_tracking/", "Model caches": "https://python.langchain.com/v0.2/docs/integrations/llm_caching/", "Milvus": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/milvus_self_query/", "PGVector (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/pgvector_self_query/", "Weaviate": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/weaviate/", "Vectara ": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/vectara_self_query/", "Databricks Vector Search": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/databricks_vector_search/", "DingoDB": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/dingo/", "OpenSearch": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/opensearch_self_query/", "Elasticsearch": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/elasticsearch_self_query/", "Chroma": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/chroma_self_query/", "Timescale Vector (Postgres) ": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/timescalevector_self_query/", "Astra DB (Cassandra)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/astradb/", "Pinecone": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/pinecone/", "Supabase (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/supabase_self_query/", "Redis": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/redis_self_query/", "Deep Lake": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/activeloop_deeplake_self_query/", "MongoDB Atlas": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/mongodb_atlas/", "Qdrant": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/qdrant_self_query/", "OpenAI": "https://python.langchain.com/v0.2/docs/integrations/llms/openai/", "Google Jobs": "https://python.langchain.com/v0.2/docs/integrations/tools/google_jobs/", "Google Serper": 
"https://python.langchain.com/v0.2/docs/integrations/tools/google_serper/", "Human as a tool": "https://python.langchain.com/v0.2/docs/integrations/tools/human_tools/", "Google Finance": "https://python.langchain.com/v0.2/docs/integrations/tools/google_finance/", "AWS Lambda": "https://python.langchain.com/v0.2/docs/integrations/tools/awslambda/", "Google Drive": "https://python.langchain.com/v0.2/docs/integrations/tools/google_drive/", "OpenWeatherMap": "https://python.langchain.com/v0.2/docs/integrations/tools/openweathermap/", "Search Tools": "https://python.langchain.com/v0.2/docs/integrations/tools/search_tools/", "Eleven Labs Text2Speech": "https://python.langchain.com/v0.2/docs/integrations/tools/eleven_labs_tts/", "Zapier Natural Language Actions": "https://python.langchain.com/v0.2/docs/integrations/tools/zapier/", "NVIDIA Riva: ASR and TTS": "https://python.langchain.com/v0.2/docs/integrations/tools/nvidia_riva/", "Lemon Agent": "https://python.langchain.com/v0.2/docs/integrations/tools/lemonai/", "GraphQL": "https://python.langchain.com/v0.2/docs/integrations/tools/graphql/", "SearchApi": "https://python.langchain.com/v0.2/docs/integrations/providers/searchapi/", "Gradio": "https://python.langchain.com/v0.2/docs/integrations/tools/gradio_tools/", "SceneXplain": "https://python.langchain.com/v0.2/docs/integrations/tools/sceneXplain/", "Dall-E Image Generator": "https://python.langchain.com/v0.2/docs/integrations/tools/dalle_image_generator/", "Ionic Shopping Tool": "https://python.langchain.com/v0.2/docs/integrations/tools/ionic_shopping/", "Zep Open Source Memory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory/", "Mot\u00f6rhead": "https://python.langchain.com/v0.2/docs/integrations/memory/motorhead_memory/", "Zep Cloud Memory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory_cloud/", "Confident": "https://python.langchain.com/v0.2/docs/integrations/callbacks/confident/", "LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/callbacks/llmonitor/", "Fiddler": "https://python.langchain.com/v0.2/docs/integrations/callbacks/fiddler/", "SageMaker Tracking": "https://python.langchain.com/v0.2/docs/integrations/callbacks/sagemaker_tracking/", "Label Studio": "https://python.langchain.com/v0.2/docs/integrations/callbacks/labelstudio/", "Comet Tracing": "https://python.langchain.com/v0.2/docs/integrations/callbacks/comet_tracing/", "Argilla": "https://python.langchain.com/v0.2/docs/integrations/callbacks/argilla/", "PromptLayer": "https://python.langchain.com/v0.2/docs/integrations/callbacks/promptlayer/", "Streamlit": "https://python.langchain.com/v0.2/docs/integrations/callbacks/streamlit/", "Trubrics": "https://python.langchain.com/v0.2/docs/integrations/callbacks/trubrics/", "Infino": "https://python.langchain.com/v0.2/docs/integrations/callbacks/infino/", "DSPy": "https://python.langchain.com/v0.2/docs/integrations/providers/dspy/", "Comet": "https://python.langchain.com/v0.2/docs/integrations/providers/comet_tracking/", "Aim": "https://python.langchain.com/v0.2/docs/integrations/providers/aim_tracking/", "Weights & Biases": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracking/", "Log10": "https://python.langchain.com/v0.2/docs/integrations/providers/log10/", "LangChain Decorators \u2728": "https://python.langchain.com/v0.2/docs/integrations/providers/langchain_decorators/", "Rebuff": "https://python.langchain.com/v0.2/docs/integrations/providers/rebuff/", "MLflow": 
"https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_tracking/", "Serper - Google Search API": "https://python.langchain.com/v0.2/docs/integrations/providers/google_serper/", "Helicone": "https://python.langchain.com/v0.2/docs/integrations/providers/helicone/", "Shale Protocol": "https://python.langchain.com/v0.2/docs/integrations/providers/shaleprotocol/", "WhyLabs": "https://python.langchain.com/v0.2/docs/integrations/providers/whylabs_profiling/", "WandB Tracing": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracing/", "ClearML": "https://python.langchain.com/v0.2/docs/integrations/providers/clearml_tracking/", "Ray Serve": "https://python.langchain.com/v0.2/docs/integrations/providers/ray_serve/", "Chat Over Documents with Vectara": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_chat/", "CSV": "https://python.langchain.com/v0.2/docs/integrations/toolkits/csv/", "Xorbits": "https://python.langchain.com/v0.2/docs/integrations/toolkits/xorbits/", "Jira": "https://python.langchain.com/v0.2/docs/integrations/toolkits/jira/", "Spark Dataframe": "https://python.langchain.com/v0.2/docs/integrations/toolkits/spark/", "Azure Cognitive Services": "https://python.langchain.com/v0.2/docs/integrations/toolkits/azure_cognitive_services/", "Natural Language APIs": "https://python.langchain.com/v0.2/docs/integrations/toolkits/openapi_nla/", "Steam Game Recommendation & Game Details": "https://python.langchain.com/v0.2/docs/integrations/toolkits/steam/", "JSON": "https://python.langchain.com/v0.2/docs/integrations/toolkits/json/", "ClickUp": "https://python.langchain.com/v0.2/docs/integrations/toolkits/clickup/", "Office365": "https://python.langchain.com/v0.2/docs/integrations/toolkits/office365/", "Pandas Dataframe": "https://python.langchain.com/v0.2/docs/integrations/toolkits/pandas/", "NASA": "https://python.langchain.com/v0.2/docs/integrations/toolkits/nasa/", "Azure AI Services": "https://python.langchain.com/v0.2/docs/integrations/toolkits/azure_ai_services/", "OpenAPI": "https://python.langchain.com/v0.2/docs/integrations/toolkits/openapi/", "Gitlab": "https://python.langchain.com/v0.2/docs/integrations/toolkits/gitlab/", "Apache Doris": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/apache_doris/", "Jaguar Vector Database": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/jaguar/", "StarRocks": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/starrocks/", "Marqo": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/marqo/", "Timescale Vector (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/timescalevector/", "Amazon Document DB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/documentdb/", "VoyageAI Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/voyageai-reranker/", "Psychic": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/psychic/", "Docugami": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/docugami/", "Amazon Textract ": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/amazon_textract/", "NetworkX": "https://python.langchain.com/v0.2/docs/integrations/graphs/networkx/", "OpaquePrompts": "https://python.langchain.com/v0.2/docs/integrations/llms/opaqueprompts/", "Layerup Security": "https://python.langchain.com/v0.2/docs/integrations/llms/layerup_security/", "Generate Synthetic Data": 
"https://python.langchain.com/v0.2/docs/tutorials/data_generation/"}, "SimpleJsonOutputParser": {"How to use output parsers to parse an LLM response into structured format": "https://python.langchain.com/v0.2/docs/how_to/output_parser_structured/"}, "AsyncCallbackHandler": {"How to use callbacks in async environments": "https://python.langchain.com/v0.2/docs/how_to/callbacks_async/", "Bedrock": "https://python.langchain.com/v0.2/docs/integrations/llms/bedrock/"}, "BaseCallbackHandler": {"How to use callbacks in async environments": "https://python.langchain.com/v0.2/docs/how_to/callbacks_async/", "How to propagate callbacks constructor": "https://python.langchain.com/v0.2/docs/how_to/callbacks_constructor/", "How to attach callbacks to a runnable": "https://python.langchain.com/v0.2/docs/how_to/callbacks_attach/", "How to create custom callback handlers": "https://python.langchain.com/v0.2/docs/how_to/custom_callbacks/", "How to pass callbacks in at runtime": "https://python.langchain.com/v0.2/docs/how_to/callbacks_runtime/"}, "LLMResult": {"How to use callbacks in async environments": "https://python.langchain.com/v0.2/docs/how_to/callbacks_async/", "How to propagate callbacks constructor": "https://python.langchain.com/v0.2/docs/how_to/callbacks_constructor/", "How to attach callbacks to a runnable": "https://python.langchain.com/v0.2/docs/how_to/callbacks_attach/", "How to pass callbacks in at runtime": "https://python.langchain.com/v0.2/docs/how_to/callbacks_runtime/", "Google Cloud Vertex AI": "https://python.langchain.com/v0.2/docs/integrations/llms/google_vertex_ai_palm/"}, "RunnableParallel": {"How to add values to a chain's state": "https://python.langchain.com/v0.2/docs/how_to/assign/", "How to invoke runnables in parallel": "https://python.langchain.com/v0.2/docs/how_to/parallel/", "How to pass through arguments from one step to the next": "https://python.langchain.com/v0.2/docs/how_to/passthrough/", "How to add message history": "https://python.langchain.com/v0.2/docs/how_to/message_history/", "How to retry when a parsing error occurs": "https://python.langchain.com/v0.2/docs/how_to/output_parser_retry/", "How to get a RAG application to add citations": "https://python.langchain.com/v0.2/docs/how_to/qa_citations/", "LangChain Expression Language Cheatsheet": "https://python.langchain.com/v0.2/docs/how_to/lcel_cheatsheet/", "How to chain runnables": "https://python.langchain.com/v0.2/docs/how_to/sequence/", "Exa Search": "https://python.langchain.com/v0.2/docs/integrations/tools/exa_search/", "ZepCloudChatMessageHistory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_cloud_chat_message_history/", "Google Cloud Vertex AI Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/google_cloud_vertexai_rerank/"}, "RunnablePassthrough": {"How to add values to a chain's state": "https://python.langchain.com/v0.2/docs/how_to/assign/", "How to route between sub-chains": "https://python.langchain.com/v0.2/docs/how_to/routing/", "How to do per-user retrieval": "https://python.langchain.com/v0.2/docs/how_to/qa_per_user/", "How to inspect runnables": "https://python.langchain.com/v0.2/docs/how_to/inspect/", "How to handle cases where no queries are generated": "https://python.langchain.com/v0.2/docs/how_to/query_no_queries/", "How to do tool/function calling": "https://python.langchain.com/v0.2/docs/how_to/function_calling/", "How to add a human-in-the-loop for tools": "https://python.langchain.com/v0.2/docs/how_to/tools_human/", "How to deal 
with large databases when doing SQL question-answering": "https://python.langchain.com/v0.2/docs/how_to/sql_large_db/", "How to handle multiple queries when doing query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_multiple_queries/", "How to map values to a graph database": "https://python.langchain.com/v0.2/docs/how_to/graph_mapping/", "How to do question answering over CSVs": "https://python.langchain.com/v0.2/docs/how_to/sql_csv/", "How to get your RAG application to return sources": "https://python.langchain.com/v0.2/docs/how_to/qa_sources/", "How to use a model to call tools": "https://python.langchain.com/v0.2/docs/how_to/tool_calling", "How to handle tool errors": "https://python.langchain.com/v0.2/docs/how_to/tools_error/", "How to add default invocation args to a Runnable": "https://python.langchain.com/v0.2/docs/how_to/binding/", "How to create a dynamic (self-constructing) chain": "https://python.langchain.com/v0.2/docs/how_to/dynamic_chain/", "How to stream runnables": "https://python.langchain.com/v0.2/docs/how_to/streaming/", "How to invoke runnables in parallel": "https://python.langchain.com/v0.2/docs/how_to/parallel/", "How to pass through arguments from one step to the next": "https://python.langchain.com/v0.2/docs/how_to/passthrough/", "How to add chat history": "https://python.langchain.com/v0.2/docs/how_to/qa_chat_history_how_to/", "How to add retrieval to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_retrieval/", "How to handle multiple retrievers when doing query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_multiple_retrievers/", "How to get a RAG application to add citations": "https://python.langchain.com/v0.2/docs/how_to/qa_citations/", "How to add memory to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_memory/", "How deal with high cardinality categoricals when doing query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_high_cardinality/", "How to add ad-hoc tool calling capability to LLMs and Chat Models": "https://python.langchain.com/v0.2/docs/how_to/tools_prompting/", "LangChain Expression Language Cheatsheet": "https://python.langchain.com/v0.2/docs/how_to/lcel_cheatsheet/", "Hybrid Search": "https://python.langchain.com/v0.2/docs/how_to/hybrid/", "How to add examples to the prompt for query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_few_shot/", "NVIDIA NIMs ": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/nvidia_ai_endpoints/", "You.com": "https://python.langchain.com/v0.2/docs/integrations/retrievers/you-retriever/", "Fleet AI Context": "https://python.langchain.com/v0.2/docs/integrations/retrievers/fleet_context/", "AskNews": "https://python.langchain.com/v0.2/docs/integrations/retrievers/asknews/", "Tavily Search API": "https://python.langchain.com/v0.2/docs/integrations/retrievers/tavily/", "Milvus Hybrid Search": "https://python.langchain.com/v0.2/docs/integrations/retrievers/milvus_hybrid_search/", "Exa Search": "https://python.langchain.com/v0.2/docs/integrations/tools/exa_search/", "UpTrain": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "DSPy": "https://python.langchain.com/v0.2/docs/integrations/providers/dspy/", "Vectara": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_summary/", "Weaviate": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/weaviate/", "Jaguar Vector Database": 
"https://python.langchain.com/v0.2/docs/integrations/vectorstores/jaguar/", "Astra DB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/astradb/", "Apache Cassandra": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/cassandra/", "Google Cloud Vertex AI Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/google_cloud_vertexai_rerank/", "# Oracle Cloud Infrastructure Generative AI": "https://python.langchain.com/v0.2/docs/integrations/llms/oci_generative_ai/", "OpaquePrompts": "https://python.langchain.com/v0.2/docs/integrations/llms/opaqueprompts/", "Build a Retrieval Augmented Generation (RAG) App": "https://python.langchain.com/v0.2/docs/tutorials/rag/", "Build a Local RAG Application": "https://python.langchain.com/v0.2/docs/tutorials/local_rag/", "Build a Chatbot": "https://python.langchain.com/v0.2/docs/tutorials/chatbot/", "Build a Query Analysis System": "https://python.langchain.com/v0.2/docs/tutorials/query_analysis/", "Build a Question/Answering system over SQL data": "https://python.langchain.com/v0.2/docs/tutorials/sql_qa/", "Vector stores and retrievers": "https://python.langchain.com/v0.2/docs/tutorials/retrievers/"}, "StrOutputParser": {"How to add values to a chain's state": "https://python.langchain.com/v0.2/docs/how_to/assign/", "How to route between sub-chains": "https://python.langchain.com/v0.2/docs/how_to/routing/", "How to do per-user retrieval": "https://python.langchain.com/v0.2/docs/how_to/qa_per_user/", "How to inspect runnables": "https://python.langchain.com/v0.2/docs/how_to/inspect/", "How to add fallbacks to a runnable": "https://python.langchain.com/v0.2/docs/how_to/fallbacks/", "How to map values to a graph database": "https://python.langchain.com/v0.2/docs/how_to/graph_mapping/", "How to do question answering over CSVs": "https://python.langchain.com/v0.2/docs/how_to/sql_csv/", "How to get your RAG application to return sources": "https://python.langchain.com/v0.2/docs/how_to/qa_sources/", "How to add default invocation args to a Runnable": "https://python.langchain.com/v0.2/docs/how_to/binding/", "How to create a dynamic (self-constructing) chain": "https://python.langchain.com/v0.2/docs/how_to/dynamic_chain/", "How to stream runnables": "https://python.langchain.com/v0.2/docs/how_to/streaming/", "How to invoke runnables in parallel": "https://python.langchain.com/v0.2/docs/how_to/parallel/", "How to pass through arguments from one step to the next": "https://python.langchain.com/v0.2/docs/how_to/passthrough/", "How to retrieve using multiple vectors per document": "https://python.langchain.com/v0.2/docs/how_to/multi_vector/", "How to add chat history": "https://python.langchain.com/v0.2/docs/how_to/qa_chat_history_how_to/", "How to add retrieval to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_retrieval/", "How to get a RAG application to add citations": "https://python.langchain.com/v0.2/docs/how_to/qa_citations/", "How to run custom functions": "https://python.langchain.com/v0.2/docs/how_to/functions/", "How to chain runnables": "https://python.langchain.com/v0.2/docs/how_to/sequence/", "Hybrid Search": "https://python.langchain.com/v0.2/docs/how_to/hybrid/", "How to do query validation as part of SQL question-answering": "https://python.langchain.com/v0.2/docs/how_to/sql_query_checking/", "Facebook Messenger": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/facebook/", "iMessage": 
"https://python.langchain.com/v0.2/docs/integrations/chat_loaders/imessage/", "NVIDIA NIMs ": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/nvidia_ai_endpoints/", "You.com": "https://python.langchain.com/v0.2/docs/integrations/retrievers/you-retriever/", "Fleet AI Context": "https://python.langchain.com/v0.2/docs/integrations/retrievers/fleet_context/", "AskNews": "https://python.langchain.com/v0.2/docs/integrations/retrievers/asknews/", "Tavily Search API": "https://python.langchain.com/v0.2/docs/integrations/retrievers/tavily/", "Milvus Hybrid Search": "https://python.langchain.com/v0.2/docs/integrations/retrievers/milvus_hybrid_search/", "ZepCloudChatMessageHistory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_cloud_chat_message_history/", "ChatOllama": "https://python.langchain.com/v0.2/docs/integrations/chat/ollama/", "Maritalk": "https://python.langchain.com/v0.2/docs/integrations/chat/maritalk/", "NVIDIA NIMs": "https://python.langchain.com/v0.2/docs/integrations/chat/nvidia_ai_endpoints/", "Fiddler": "https://python.langchain.com/v0.2/docs/integrations/callbacks/fiddler/", "UpTrain": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "DSPy": "https://python.langchain.com/v0.2/docs/integrations/providers/dspy/", "Vectara": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_summary/", "Weaviate": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/weaviate/", "Jaguar Vector Database": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/jaguar/", "Astra DB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/astradb/", "Apache Cassandra": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/cassandra/", "Volc Engine Maas": "https://python.langchain.com/v0.2/docs/integrations/llms/volcengine_maas/", "# Oracle Cloud Infrastructure Generative AI": "https://python.langchain.com/v0.2/docs/integrations/llms/oci_generative_ai/", "OpaquePrompts": "https://python.langchain.com/v0.2/docs/integrations/llms/opaqueprompts/", "AI21LLM": "https://python.langchain.com/v0.2/docs/integrations/llms/ai21/", "Build a Retrieval Augmented Generation (RAG) App": "https://python.langchain.com/v0.2/docs/tutorials/rag/", "Build a Local RAG Application": "https://python.langchain.com/v0.2/docs/tutorials/local_rag/", "Build a Question/Answering system over SQL data": "https://python.langchain.com/v0.2/docs/tutorials/sql_qa/", "Build a Simple LLM Application": "https://python.langchain.com/v0.2/docs/tutorials/llm_chain/"}, "RunnableBranch": {"How to route between sub-chains": "https://python.langchain.com/v0.2/docs/how_to/routing/", "How to add retrieval to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_retrieval/"}, "cosine_similarity": {"How to route between sub-chains": "https://python.langchain.com/v0.2/docs/how_to/routing/"}, "tool": {"How to use tools in a chain": "https://python.langchain.com/v0.2/docs/how_to/tools_chain/", "How to do tool/function calling": "https://python.langchain.com/v0.2/docs/how_to/function_calling/", "How to add a human-in-the-loop for tools": "https://python.langchain.com/v0.2/docs/how_to/tools_human/", "How to create custom tools": "https://python.langchain.com/v0.2/docs/how_to/custom_tools/", "How to pass multimodal data directly to models": "https://python.langchain.com/v0.2/docs/how_to/multimodal_inputs/", "How to use a model to call tools": "https://python.langchain.com/v0.2/docs/how_to/tool_calling", "How to 
handle tool errors": "https://python.langchain.com/v0.2/docs/how_to/tools_error/", "How to stream runnables": "https://python.langchain.com/v0.2/docs/how_to/streaming/", "How to add ad-hoc tool calling capability to LLMs and Chat Models": "https://python.langchain.com/v0.2/docs/how_to/tools_prompting/", "How to migrate from legacy LangChain agents to LangGraph": "https://python.langchain.com/v0.2/docs/how_to/migrate_agent/", "Exa Search": "https://python.langchain.com/v0.2/docs/integrations/tools/exa_search/", "ChatTongyi": "https://python.langchain.com/v0.2/docs/integrations/chat/tongyi/", "LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/callbacks/llmonitor/", "Log, Trace, and Monitor": "https://python.langchain.com/v0.2/docs/integrations/providers/portkey/logging_tracing_portkey/", "Portkey": "https://python.langchain.com/v0.2/docs/integrations/providers/portkey/index/", "JSONFormer": "https://python.langchain.com/v0.2/docs/integrations/llms/jsonformer_experimental/"}, "ConfigurableField": {"How to do per-user retrieval": "https://python.langchain.com/v0.2/docs/how_to/qa_per_user/", "How to configure runtime chain internals": "https://python.langchain.com/v0.2/docs/how_to/configure/", "LangChain Expression Language Cheatsheet": "https://python.langchain.com/v0.2/docs/how_to/lcel_cheatsheet/", "How to combine results from multiple retrievers": "https://python.langchain.com/v0.2/docs/how_to/ensemble_retriever/", "Hybrid Search": "https://python.langchain.com/v0.2/docs/how_to/hybrid/"}, "NGramOverlapExampleSelector": {"How to select examples by n-gram overlap": "https://python.langchain.com/v0.2/docs/how_to/example_selectors_ngram/"}, "get_openai_callback": {"How to track token usage in ChatModels": "https://python.langchain.com/v0.2/docs/how_to/chat_token_usage_tracking/", "How to run custom functions": "https://python.langchain.com/v0.2/docs/how_to/functions/", "How to track token usage for LLMs": "https://python.langchain.com/v0.2/docs/how_to/llm_token_usage_tracking/", "AzureChatOpenAI": "https://python.langchain.com/v0.2/docs/integrations/chat/azure_chat_openai/"}, "load_tools": {"How to track token usage in ChatModels": "https://python.langchain.com/v0.2/docs/how_to/chat_token_usage_tracking/", "Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "ChatGPT Plugins": "https://python.langchain.com/v0.2/docs/integrations/tools/chatgpt_plugins/", "Google Jobs": "https://python.langchain.com/v0.2/docs/integrations/tools/google_jobs/", "Human as a tool": "https://python.langchain.com/v0.2/docs/integrations/tools/human_tools/", "Google Finance": "https://python.langchain.com/v0.2/docs/integrations/tools/google_finance/", "AWS Lambda": "https://python.langchain.com/v0.2/docs/integrations/tools/awslambda/", "Google Drive": "https://python.langchain.com/v0.2/docs/integrations/tools/google_drive/", "Requests": "https://python.langchain.com/v0.2/docs/integrations/tools/requests/", "OpenWeatherMap": "https://python.langchain.com/v0.2/docs/integrations/providers/openweathermap/", "Memorize": "https://python.langchain.com/v0.2/docs/integrations/tools/memorize/", "Search Tools": "https://python.langchain.com/v0.2/docs/integrations/tools/search_tools/", "Eleven Labs Text2Speech": "https://python.langchain.com/v0.2/docs/integrations/tools/eleven_labs_tts/", "ArXiv": "https://python.langchain.com/v0.2/docs/integrations/tools/arxiv/", "GraphQL": "https://python.langchain.com/v0.2/docs/integrations/tools/graphql/", "SceneXplain": 
"https://python.langchain.com/v0.2/docs/integrations/tools/sceneXplain/", "Dall-E Image Generator": "https://python.langchain.com/v0.2/docs/integrations/tools/dalle_image_generator/", "Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/chat/huggingface/", "MLX": "https://python.langchain.com/v0.2/docs/integrations/chat/mlx/", "LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/callbacks/llmonitor/", "SageMaker Tracking": "https://python.langchain.com/v0.2/docs/integrations/callbacks/sagemaker_tracking/", "Comet Tracing": "https://python.langchain.com/v0.2/docs/integrations/callbacks/comet_tracing/", "Argilla": "https://python.langchain.com/v0.2/docs/integrations/callbacks/argilla/", "Streamlit": "https://python.langchain.com/v0.2/docs/integrations/callbacks/streamlit/", "SerpAPI": "https://python.langchain.com/v0.2/docs/integrations/providers/serpapi/", "SearchApi": "https://python.langchain.com/v0.2/docs/integrations/providers/searchapi/", "Comet": "https://python.langchain.com/v0.2/docs/integrations/providers/comet_tracking/", "Aim": "https://python.langchain.com/v0.2/docs/integrations/providers/aim_tracking/", "Golden": "https://python.langchain.com/v0.2/docs/integrations/providers/golden/", "Weights & Biases": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracking/", "Wolfram Alpha": "https://python.langchain.com/v0.2/docs/integrations/providers/wolfram_alpha/", "MLflow": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_tracking/", "DataForSEO": "https://python.langchain.com/v0.2/docs/integrations/providers/dataforseo/", "SearxNG Search API": "https://python.langchain.com/v0.2/docs/integrations/providers/searx/", "Serper - Google Search API": "https://python.langchain.com/v0.2/docs/integrations/providers/google_serper/", "Stack Exchange": "https://python.langchain.com/v0.2/docs/integrations/providers/stackexchange/", "Flyte": "https://python.langchain.com/v0.2/docs/integrations/providers/flyte/", "WandB Tracing": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracing/", "ClearML": "https://python.langchain.com/v0.2/docs/integrations/providers/clearml_tracking/", "Dataherald": "https://python.langchain.com/v0.2/docs/integrations/providers/dataherald/", "Amazon API Gateway": "https://python.langchain.com/v0.2/docs/integrations/llms/amazon_api_gateway/"}, "get_bedrock_anthropic_callback": {"How to track token usage in ChatModels": "https://python.langchain.com/v0.2/docs/how_to/chat_token_usage_tracking/"}, "CallbackManagerForLLMRun": {"How to create a custom LLM class": "https://python.langchain.com/v0.2/docs/how_to/custom_llm/", "How to create a custom chat model class": "https://python.langchain.com/v0.2/docs/how_to/custom_chat_model/"}, "LLM": {"How to create a custom LLM class": "https://python.langchain.com/v0.2/docs/how_to/custom_llm/"}, "GenerationChunk": {"How to create a custom LLM class": "https://python.langchain.com/v0.2/docs/how_to/custom_llm/"}, "BaseLoader": {"How to create a custom Document Loader": "https://python.langchain.com/v0.2/docs/how_to/document_loader_custom/", "How to use the LangChain indexing API": "https://python.langchain.com/v0.2/docs/how_to/indexing/"}, "BaseBlobParser": {"How to create a custom Document Loader": "https://python.langchain.com/v0.2/docs/how_to/document_loader_custom/"}, "Blob": {"How to create a custom Document Loader": "https://python.langchain.com/v0.2/docs/how_to/document_loader_custom/", "Google": 
"https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "Google Cloud Document AI": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/google_docai/"}, "FileSystemBlobLoader": {"How to create a custom Document Loader": "https://python.langchain.com/v0.2/docs/how_to/document_loader_custom/"}, "GenericLoader": {"How to create a custom Document Loader": "https://python.langchain.com/v0.2/docs/how_to/document_loader_custom/", "Grobid": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/grobid/", "YouTube audio": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/youtube_audio/", "Source Code": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/source_code/"}, "LengthBasedExampleSelector": {"How to select examples by length": "https://python.langchain.com/v0.2/docs/how_to/example_selectors_length_based/"}, "BaseExampleSelector": {"How to use example selectors": "https://python.langchain.com/v0.2/docs/how_to/example_selectors/"}, "Language": {"How to split code": "https://python.langchain.com/v0.2/docs/how_to/code_splitter/", "Source Code": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/source_code/"}, "Chroma": {"How to use few shot examples": "https://python.langchain.com/v0.2/docs/how_to/few_shot_examples/", "How to handle cases where no queries are generated": "https://python.langchain.com/v0.2/docs/how_to/query_no_queries/", "How to use few shot examples in chat models": "https://python.langchain.com/v0.2/docs/how_to/few_shot_examples_chat/", "How to select examples by similarity": "https://python.langchain.com/v0.2/docs/how_to/example_selectors_similarity/", "How to reorder retrieved results to mitigate the \"lost in the middle\" effect": "https://python.langchain.com/v0.2/docs/how_to/long_context_reorder/", "How to handle multiple queries when doing query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_multiple_queries/", "How to stream results from your RAG application": "https://python.langchain.com/v0.2/docs/how_to/qa_streaming/", "How to get your RAG application to return sources": "https://python.langchain.com/v0.2/docs/how_to/qa_sources/", "How to retrieve using multiple vectors per document": "https://python.langchain.com/v0.2/docs/how_to/multi_vector/", "How to add chat history": "https://python.langchain.com/v0.2/docs/how_to/qa_chat_history_how_to/", "How to add retrieval to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_retrieval/", "How to handle multiple retrievers when doing query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_multiple_retrievers/", "How to create and query vector stores": "https://python.langchain.com/v0.2/docs/how_to/vectorstores/", "How to use the MultiQueryRetriever": "https://python.langchain.com/v0.2/docs/how_to/MultiQueryRetriever/", "How to use the Parent Document Retriever": "https://python.langchain.com/v0.2/docs/how_to/parent_document_retriever/", "How deal with high cardinality categoricals when doing query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_high_cardinality/", "How to do \"self-querying\" retrieval": "https://python.langchain.com/v0.2/docs/how_to/self_query/", "RePhraseQuery": "https://python.langchain.com/v0.2/docs/integrations/retrievers/re_phrase/", "LOTR (Merger Retriever)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/merger_retriever/", "Chroma": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/chroma/", 
"Confident": "https://python.langchain.com/v0.2/docs/integrations/callbacks/confident/", "Google Cloud Vertex AI Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/google_cloud_vertexai_rerank/", "Psychic": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/psychic/", "Docugami": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/docugami/", "Build a Retrieval Augmented Generation (RAG) App": "https://python.langchain.com/v0.2/docs/tutorials/rag/", "Build a Local RAG Application": "https://python.langchain.com/v0.2/docs/tutorials/local_rag/", "Conversational RAG": "https://python.langchain.com/v0.2/docs/tutorials/qa_chat_history/", "Build a Query Analysis System": "https://python.langchain.com/v0.2/docs/tutorials/query_analysis/", "Vector stores and retrievers": "https://python.langchain.com/v0.2/docs/tutorials/retrievers/"}, "PydanticToolsParser": {"How to handle cases where no queries are generated": "https://python.langchain.com/v0.2/docs/how_to/query_no_queries/", "How to do tool/function calling": "https://python.langchain.com/v0.2/docs/how_to/function_calling/", "How to deal with large databases when doing SQL question-answering": "https://python.langchain.com/v0.2/docs/how_to/sql_large_db/", "How to handle multiple queries when doing query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_multiple_queries/", "How to use a model to call tools": "https://python.langchain.com/v0.2/docs/how_to/tool_calling", "How to handle multiple retrievers when doing query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_multiple_retrievers/", "Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/chat/huggingface/"}, "chain": {"How to handle cases where no queries are generated": "https://python.langchain.com/v0.2/docs/how_to/query_no_queries/", "How to handle multiple queries when doing query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_multiple_queries/", "How to create a dynamic (self-constructing) chain": "https://python.langchain.com/v0.2/docs/how_to/dynamic_chain/", "How to stream runnables": "https://python.langchain.com/v0.2/docs/how_to/streaming/", "How to handle multiple retrievers when doing query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_multiple_retrievers/", "How to run custom functions": "https://python.langchain.com/v0.2/docs/how_to/functions/", "How to add scores to retriever results": "https://python.langchain.com/v0.2/docs/how_to/add_scores_retriever/"}, "RecursiveJsonSplitter": {"How to split JSON data": "https://python.langchain.com/v0.2/docs/how_to/recursive_json_splitter/"}, "FewShotChatMessagePromptTemplate": {"How to use few shot examples in chat models": "https://python.langchain.com/v0.2/docs/how_to/few_shot_examples_chat/", "Fiddler": "https://python.langchain.com/v0.2/docs/integrations/callbacks/fiddler/"}, "ToolMessage": {"How to do tool/function calling": "https://python.langchain.com/v0.2/docs/how_to/function_calling/", "How to use reference examples when doing extraction": "https://python.langchain.com/v0.2/docs/how_to/extraction_examples/", "How to do question answering over CSVs": "https://python.langchain.com/v0.2/docs/how_to/sql_csv/", "How to use a model to call tools": "https://python.langchain.com/v0.2/docs/how_to/tool_calling", "How to create a custom chat model class": "https://python.langchain.com/v0.2/docs/how_to/custom_chat_model/", "How to handle tool errors": 
"https://python.langchain.com/v0.2/docs/how_to/tools_error/", "How to return structured data from a model": "https://python.langchain.com/v0.2/docs/how_to/structured_output/", "How to add examples to the prompt for query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_few_shot/", "ChatAnthropic": "https://python.langchain.com/v0.2/docs/integrations/chat/anthropic/"}, "XMLOutputParser": {"How to parse XML output": "https://python.langchain.com/v0.2/docs/how_to/output_parser_xml/", "How to get a RAG application to add citations": "https://python.langchain.com/v0.2/docs/how_to/qa_citations/"}, "Runnable": {"How to add a human-in-the-loop for tools": "https://python.langchain.com/v0.2/docs/how_to/tools_human/", "How to handle tool errors": "https://python.langchain.com/v0.2/docs/how_to/tools_error/", "How to create a dynamic (self-constructing) chain": "https://python.langchain.com/v0.2/docs/how_to/dynamic_chain/"}, "StructuredTool": {"How to create custom tools": "https://python.langchain.com/v0.2/docs/how_to/custom_tools/", "Infobip": "https://python.langchain.com/v0.2/docs/integrations/tools/infobip/"}, "ToolException": {"How to create custom tools": "https://python.langchain.com/v0.2/docs/how_to/custom_tools/"}, "HuggingFaceEmbeddings": {"Text embedding models": "https://python.langchain.com/v0.2/docs/how_to/embed_text/", "How to reorder retrieved results to mitigate the \"lost in the middle\" effect": "https://python.langchain.com/v0.2/docs/how_to/long_context_reorder/", "Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/platforms/huggingface/", "Sentence Transformers on Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/sentence_transformers/", "LOTR (Merger Retriever)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/merger_retriever/", "Snowflake": "https://python.langchain.com/v0.2/docs/integrations/providers/snowflake/", "VDMS": "https://python.langchain.com/v0.2/docs/integrations/providers/vdms/", "Vald": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vald/", "ScaNN": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/scann/", "Annoy": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/annoy/", "Oracle AI Vector Search: Vector Store": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/oracle/", "TileDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tiledb/", "SurrealDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/surrealdb/", "Faiss (Async)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/faiss_async/", "Aerospike": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/aerospike/", "Intel's Visual Data Management System (VDMS)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vdms/", "Faiss": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/faiss/", "Infinispan": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/infinispanvs/", "SemaDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/semadb/", "Vearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vearch/", "Cross Encoder Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/cross_encoder_reranker/", "self-query-qdrant": "https://python.langchain.com/v0.2/docs/templates/self-query-qdrant/"}, "LongContextReorder": {"How to reorder retrieved results to mitigate the \"lost in the middle\" effect": 
"https://python.langchain.com/v0.2/docs/how_to/long_context_reorder/", "LOTR (Merger Retriever)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/merger_retriever/"}, "create_stuff_documents_chain": {"How to reorder retrieved results to mitigate the \"lost in the middle\" effect": "https://python.langchain.com/v0.2/docs/how_to/long_context_reorder/", "How to stream results from your RAG application": "https://python.langchain.com/v0.2/docs/how_to/qa_streaming/", "How to get your RAG application to return sources": "https://python.langchain.com/v0.2/docs/how_to/qa_sources/", "How to add chat history": "https://python.langchain.com/v0.2/docs/how_to/qa_chat_history_how_to/", "How to add retrieval to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_retrieval/", "RAGatouille": "https://python.langchain.com/v0.2/docs/integrations/retrievers/ragatouille/", "Jina Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/jina_rerank/", "Build a Retrieval Augmented Generation (RAG) App": "https://python.langchain.com/v0.2/docs/tutorials/rag/", "Conversational RAG": "https://python.langchain.com/v0.2/docs/tutorials/qa_chat_history/"}, "BaseMessage": {"How to use reference examples when doing extraction": "https://python.langchain.com/v0.2/docs/how_to/extraction_examples/", "How to propagate callbacks constructor": "https://python.langchain.com/v0.2/docs/how_to/callbacks_constructor/", "How to attach callbacks to a runnable": "https://python.langchain.com/v0.2/docs/how_to/callbacks_attach/", "How to create a custom chat model class": "https://python.langchain.com/v0.2/docs/how_to/custom_chat_model/", "How to pass callbacks in at runtime": "https://python.langchain.com/v0.2/docs/how_to/callbacks_runtime/", "How to add examples to the prompt for query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_few_shot/", "WeChat": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/wechat/", "Discord": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/discord/", "Chat Bot Feedback Template": "https://python.langchain.com/v0.2/docs/templates/chat-bot-feedback/"}, "DatetimeOutputParser": {"How to add fallbacks to a runnable": "https://python.langchain.com/v0.2/docs/how_to/fallbacks/"}, "CypherQueryCorrector": {"How to map values to a graph database": "https://python.langchain.com/v0.2/docs/how_to/graph_mapping/"}, "Schema": {"How to map values to a graph database": "https://python.langchain.com/v0.2/docs/how_to/graph_mapping/"}, "set_llm_cache": {"How to cache chat model responses": "https://python.langchain.com/v0.2/docs/how_to/chat_model_caching/", "How to cache LLM responses": "https://python.langchain.com/v0.2/docs/how_to/llm_caching/", "Model caches": "https://python.langchain.com/v0.2/docs/integrations/llm_caching/", "DSPy": "https://python.langchain.com/v0.2/docs/integrations/providers/dspy/", "MongoDB Atlas": "https://python.langchain.com/v0.2/docs/integrations/providers/mongodb_atlas/", "Astra DB": "https://python.langchain.com/v0.2/docs/integrations/providers/astradb/", "Redis": "https://python.langchain.com/v0.2/docs/integrations/providers/redis/", "Cassandra": "https://python.langchain.com/v0.2/docs/integrations/providers/cassandra/", "Momento": "https://python.langchain.com/v0.2/docs/integrations/providers/momento/"}, "InMemoryCache": {"How to cache chat model responses": "https://python.langchain.com/v0.2/docs/how_to/chat_model_caching/", "How to cache LLM responses": 
"https://python.langchain.com/v0.2/docs/how_to/llm_caching/", "Model caches": "https://python.langchain.com/v0.2/docs/integrations/llm_caching/"}, "SQLiteCache": {"How to cache chat model responses": "https://python.langchain.com/v0.2/docs/how_to/chat_model_caching/", "How to cache LLM responses": "https://python.langchain.com/v0.2/docs/how_to/llm_caching/", "Model caches": "https://python.langchain.com/v0.2/docs/integrations/llm_caching/", "DSPy": "https://python.langchain.com/v0.2/docs/integrations/providers/dspy/"}, "create_sql_agent": {"How to do question answering over CSVs": "https://python.langchain.com/v0.2/docs/how_to/sql_csv/", "CnosDB": "https://python.langchain.com/v0.2/docs/integrations/providers/cnosdb/", "SQL Database": "https://python.langchain.com/v0.2/docs/integrations/toolkits/sql_database/"}, "PythonAstREPLTool": {"How to do question answering over CSVs": "https://python.langchain.com/v0.2/docs/how_to/sql_csv/"}, "JsonOutputKeyToolsParser": {"How to do question answering over CSVs": "https://python.langchain.com/v0.2/docs/how_to/sql_csv/"}, "create_pandas_dataframe_agent": {"How to do question answering over CSVs": "https://python.langchain.com/v0.2/docs/how_to/sql_csv/", "Airbyte Question Answering": "https://python.langchain.com/v0.2/docs/integrations/toolkits/airbyte_structured_qa/", "Pandas Dataframe": "https://python.langchain.com/v0.2/docs/integrations/toolkits/pandas/"}, "create_retrieval_chain": {"How to stream results from your RAG application": "https://python.langchain.com/v0.2/docs/how_to/qa_streaming/", "How to get your RAG application to return sources": "https://python.langchain.com/v0.2/docs/how_to/qa_sources/", "How to add chat history": "https://python.langchain.com/v0.2/docs/how_to/qa_chat_history_how_to/", "RAGatouille": "https://python.langchain.com/v0.2/docs/integrations/retrievers/ragatouille/", "Jina Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/jina_rerank/", "Build a Retrieval Augmented Generation (RAG) App": "https://python.langchain.com/v0.2/docs/tutorials/rag/", "Conversational RAG": "https://python.langchain.com/v0.2/docs/tutorials/qa_chat_history/"}, "create_history_aware_retriever": {"How to stream results from your RAG application": "https://python.langchain.com/v0.2/docs/how_to/qa_streaming/", "How to add chat history": "https://python.langchain.com/v0.2/docs/how_to/qa_chat_history_how_to/", "Conversational RAG": "https://python.langchain.com/v0.2/docs/tutorials/qa_chat_history/"}, "OutputFixingParser": {"How to use the output-fixing parser": "https://python.langchain.com/v0.2/docs/how_to/output_parser_fixing/", "How to retry when a parsing error occurs": "https://python.langchain.com/v0.2/docs/how_to/output_parser_retry/"}, "FunctionMessage": {"How to create a custom chat model class": "https://python.langchain.com/v0.2/docs/how_to/custom_chat_model/"}, "AIMessageChunk": {"How to create a custom chat model class": "https://python.langchain.com/v0.2/docs/how_to/custom_chat_model/", "How to create a custom Output Parser": "https://python.langchain.com/v0.2/docs/how_to/output_parser_custom/", "Google Cloud Vertex AI": "https://python.langchain.com/v0.2/docs/integrations/llms/google_vertex_ai_palm/"}, "FunctionMessageChunk": {"How to create a custom chat model class": "https://python.langchain.com/v0.2/docs/how_to/custom_chat_model/"}, "HumanMessageChunk": {"How to create a custom chat model class": "https://python.langchain.com/v0.2/docs/how_to/custom_chat_model/"}, "SystemMessageChunk": {"How to 
create a custom chat model class": "https://python.langchain.com/v0.2/docs/how_to/custom_chat_model/"}, "ToolMessageChunk": {"How to create a custom chat model class": "https://python.langchain.com/v0.2/docs/how_to/custom_chat_model/"}, "AsyncCallbackManagerForLLMRun": {"How to create a custom chat model class": "https://python.langchain.com/v0.2/docs/how_to/custom_chat_model/"}, "SimpleChatModel": {"How to create a custom chat model class": "https://python.langchain.com/v0.2/docs/how_to/custom_chat_model/"}, "ChatGeneration": {"How to create a custom chat model class": "https://python.langchain.com/v0.2/docs/how_to/custom_chat_model/", "How to create a custom Output Parser": "https://python.langchain.com/v0.2/docs/how_to/output_parser_custom/"}, "ChatGenerationChunk": {"How to create a custom chat model class": "https://python.langchain.com/v0.2/docs/how_to/custom_chat_model/"}, "ChatResult": {"How to create a custom chat model class": "https://python.langchain.com/v0.2/docs/how_to/custom_chat_model/"}, "run_in_executor": {"How to create a custom chat model class": "https://python.langchain.com/v0.2/docs/how_to/custom_chat_model/"}, "MoveFileTool": {"How to convert tools to OpenAI Functions": "https://python.langchain.com/v0.2/docs/how_to/tools_as_openai_functions/"}, "RunnableConfig": {"How to handle tool errors": "https://python.langchain.com/v0.2/docs/how_to/tools_error/", "How to run custom functions": "https://python.langchain.com/v0.2/docs/how_to/functions/", "How to add ad-hoc tool calling capability to LLMs and Chat Models": "https://python.langchain.com/v0.2/docs/how_to/tools_prompting/", "LangChain Expression Language Cheatsheet": "https://python.langchain.com/v0.2/docs/how_to/lcel_cheatsheet/"}, "ToolCall": {"How to handle tool errors": "https://python.langchain.com/v0.2/docs/how_to/tools_error/"}, "SQLRecordManager": {"How to use the LangChain indexing API": "https://python.langchain.com/v0.2/docs/how_to/indexing/"}, "index": {"How to use the LangChain indexing API": "https://python.langchain.com/v0.2/docs/how_to/indexing/"}, "create_openai_tools_agent": {"How to add tools to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_tools/", "Log, Trace, and Monitor": "https://python.langchain.com/v0.2/docs/integrations/providers/portkey/logging_tracing_portkey/", "Portkey": "https://python.langchain.com/v0.2/docs/integrations/providers/portkey/index/", "SQL Database": "https://python.langchain.com/v0.2/docs/integrations/toolkits/sql_database/", "Slack": "https://python.langchain.com/v0.2/docs/integrations/toolkits/slack/", "Cassandra Database": "https://python.langchain.com/v0.2/docs/integrations/toolkits/cassandra_database/"}, "SemanticChunker": {"How to split text based on semantic similarity": "https://python.langchain.com/v0.2/docs/how_to/semantic-chunker/"}, "JsonOutputParser": {"How to stream runnables": "https://python.langchain.com/v0.2/docs/how_to/streaming/", "How to add ad-hoc tool calling capability to LLMs and Chat Models": "https://python.langchain.com/v0.2/docs/how_to/tools_prompting/", "How to parse JSON output": "https://python.langchain.com/v0.2/docs/how_to/output_parser_json/"}, "InMemoryByteStore": {"How to retrieve using multiple vectors per document": "https://python.langchain.com/v0.2/docs/how_to/multi_vector/", "Caching": "https://python.langchain.com/v0.2/docs/how_to/caching_embeddings/", "InMemoryByteStore": "https://python.langchain.com/v0.2/docs/integrations/stores/in_memory/"}, "TextLoader": {"How to retrieve using multiple vectors per 
document": "https://python.langchain.com/v0.2/docs/how_to/multi_vector/", "How to do retrieval with contextual compression": "https://python.langchain.com/v0.2/docs/how_to/contextual_compression/", "How to load documents from a directory": "https://python.langchain.com/v0.2/docs/how_to/document_loader_directory/", "How to create and query vector stores": "https://python.langchain.com/v0.2/docs/how_to/vectorstores/", "How to use the Parent Document Retriever": "https://python.langchain.com/v0.2/docs/how_to/parent_document_retriever/", "How to use a vectorstore as a retriever": "https://python.langchain.com/v0.2/docs/how_to/vectorstore_retriever/", "Caching": "https://python.langchain.com/v0.2/docs/how_to/caching_embeddings/", "Azure AI Search": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azuresearch/", "Kinetica Vectorstore based Retriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/kinetica/", "JaguarDB Vector Database": "https://python.langchain.com/v0.2/docs/integrations/retrievers/jaguar/", "LLMLingua Document Compressor": "https://python.langchain.com/v0.2/docs/integrations/retrievers/llmlingua/", "Cohere reranker": "https://python.langchain.com/v0.2/docs/integrations/retrievers/cohere-reranker/", "SingleStoreDB": "https://python.langchain.com/v0.2/docs/integrations/retrievers/singlestoredb/", "FlashRank reranker": "https://python.langchain.com/v0.2/docs/integrations/retrievers/flashrank-reranker/", "Vectara ": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/vectara_self_query/", "Confident": "https://python.langchain.com/v0.2/docs/integrations/callbacks/confident/", "UpTrain": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "Upstash Vector": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/upstash/", "VDMS": "https://python.langchain.com/v0.2/docs/integrations/providers/vdms/", "Chat Over Documents with Vectara": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_chat/", "LanceDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/lancedb/", "Kinetica Vectorstore API": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/kinetica/", "SQLite-VSS": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sqlitevss/", "Vald": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vald/", "Weaviate": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/weaviate/", "Jaguar Vector Database": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/jaguar/", "SAP HANA Cloud Vector Engine": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sap_hanavector/", "DashVector": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/dashvector/", "Databricks Vector Search": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/databricks_vector_search/", "ScaNN": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/scann/", "Xata": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/xata/", "Hippo": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/hippo/", "Vectara": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vectara/", "Vespa": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vespa/", "Rockset": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/rockset/", "DingoDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/dingo/", "Zilliz": 
"https://python.langchain.com/v0.2/docs/integrations/vectorstores/zilliz/", "Azure Cosmos DB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azure_cosmos_db/", "viking DB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vikingdb/", "Annoy": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/annoy/", "Couchbase ": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/couchbase/", "Typesense": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/typesense/", "Momento Vector Index (MVI)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/momento_vector_index/", "TiDB Vector": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tidb_vector/", "Relyt": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/relyt/", "Atlas": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/atlas/", "Activeloop Deep Lake": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/activeloop_deeplake/", "vlite": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vlite/", "Neo4j Vector Index": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/neo4jvector/", "Lantern": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/lantern/", "Tair": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tair/", "Chroma": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/chroma/", "DuckDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/duckdb/", "Alibaba Cloud OpenSearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/alibabacloud_opensearch/", "Clarifai": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/clarifai/", "scikit-learn": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sklearn/", "Tencent Cloud VectorDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tencentvectordb/", "DocArray HnswSearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/docarray_hnsw/", "MyScale": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/myscale/", "TileDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tiledb/", "ClickHouse": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/clickhouse/", "Google Memorystore for Redis": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_memorystore_redis/", "Qdrant": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/qdrant/", "Tigris": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tigris/", "China Mobile ECloud ElasticSearch VectorSearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/ecloud_vector_search/", "Bagel": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/bagel/", "Baidu Cloud ElasticSearch VectorSearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/baiducloud_vector_search/", "AwaDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/awadb/", "Supabase (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/supabase/", "SurrealDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/surrealdb/", "OpenSearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/opensearch/", "Pinecone": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pinecone/", "Faiss (Async)": 
"https://python.langchain.com/v0.2/docs/integrations/vectorstores/faiss_async/", "BagelDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/bageldb/", "ManticoreSearch VectorStore": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/manticore_search/", "USearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/usearch/", "Milvus": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/milvus/", "Elasticsearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/elasticsearch/", "PGVecto.rs": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pgvecto_rs/", "Marqo": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/marqo/", "DocArray InMemorySearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/docarray_in_memory/", "Postgres Embedding": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pgembedding/", "Intel's Visual Data Management System (VDMS)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vdms/", "Timescale Vector (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/timescalevector/", "Faiss": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/faiss/", "Epsilla": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/epsilla/", "Amazon Document DB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/documentdb/", "SemaDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/semadb/", "AnalyticDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/analyticdb/", "Hologres": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/hologres/", "Baidu VectorDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/baiduvectordb/", "Vearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vearch/", "Meilisearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/meilisearch/", "VoyageAI Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/voyageai-reranker/", "RankLLM Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/rankllm-reranker/", "OpenVINO Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/openvino_rerank/", "Cross Encoder Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/cross_encoder_reranker/", "Jina Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/jina_rerank/"}, "MultiVectorRetriever": {"How to retrieve using multiple vectors per document": "https://python.langchain.com/v0.2/docs/how_to/multi_vector/", "How to add scores to retriever results": "https://python.langchain.com/v0.2/docs/how_to/add_scores_retriever/", "Fleet AI Context": "https://python.langchain.com/v0.2/docs/integrations/retrievers/fleet_context/", "Docugami": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/docugami/"}, "SearchType": {"How to retrieve using multiple vectors per document": "https://python.langchain.com/v0.2/docs/how_to/multi_vector/", "Zep Open Source": "https://python.langchain.com/v0.2/docs/integrations/retrievers/zep_memorystore/", "Docugami": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/docugami/"}, "SQLChatMessageHistory": {"How to add message history": "https://python.langchain.com/v0.2/docs/how_to/message_history/", "SQL (SQLAlchemy)": 
"https://python.langchain.com/v0.2/docs/integrations/memory/sql_chat_message_history/", "SQLite": "https://python.langchain.com/v0.2/docs/integrations/providers/sqlite/"}, "ConfigurableFieldSpec": {"How to add message history": "https://python.langchain.com/v0.2/docs/how_to/message_history/"}, "Ollama": {"Run LLMs locally": "https://python.langchain.com/v0.2/docs/how_to/local_llms/", "How to add ad-hoc tool calling capability to LLMs and Chat Models": "https://python.langchain.com/v0.2/docs/how_to/tools_prompting/", "Ollama": "https://python.langchain.com/v0.2/docs/integrations/llms/ollama/"}, "CallbackManager": {"Run LLMs locally": "https://python.langchain.com/v0.2/docs/how_to/local_llms/", "ChatLiteLLM": "https://python.langchain.com/v0.2/docs/integrations/chat/litellm/", "GPTRouter": "https://python.langchain.com/v0.2/docs/integrations/chat/gpt_router/", "ChatLiteLLMRouter": "https://python.langchain.com/v0.2/docs/integrations/chat/litellm_router/", "ZHIPU AI": "https://python.langchain.com/v0.2/docs/integrations/chat/zhipuai/", "Llama.cpp": "https://python.langchain.com/v0.2/docs/integrations/llms/llamacpp/", "Titan Takeoff": "https://python.langchain.com/v0.2/docs/integrations/llms/titan_takeoff/"}, "StreamingStdOutCallbackHandler": {"Run LLMs locally": "https://python.langchain.com/v0.2/docs/how_to/local_llms/", "DeepInfra": "https://python.langchain.com/v0.2/docs/integrations/chat/deepinfra/", "ChatLiteLLM": "https://python.langchain.com/v0.2/docs/integrations/chat/litellm/", "ChatEverlyAI": "https://python.langchain.com/v0.2/docs/integrations/chat/everlyai/", "GPTRouter": "https://python.langchain.com/v0.2/docs/integrations/chat/gpt_router/", "ChatLiteLLMRouter": "https://python.langchain.com/v0.2/docs/integrations/chat/litellm_router/", "ZHIPU AI": "https://python.langchain.com/v0.2/docs/integrations/chat/zhipuai/", "Yuan2.0": "https://python.langchain.com/v0.2/docs/integrations/chat/yuan2/", "GPT4All": "https://python.langchain.com/v0.2/docs/integrations/llms/gpt4all/", "Arthur": "https://python.langchain.com/v0.2/docs/integrations/providers/arthur_tracking/", "Chat Over Documents with Vectara": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_chat/", "TextGen": "https://python.langchain.com/v0.2/docs/integrations/llms/textgen/", "Bedrock": "https://python.langchain.com/v0.2/docs/integrations/llms/bedrock/", "Llama.cpp": "https://python.langchain.com/v0.2/docs/integrations/llms/llamacpp/", "Titan Takeoff": "https://python.langchain.com/v0.2/docs/integrations/llms/titan_takeoff/", "Eden AI": "https://python.langchain.com/v0.2/docs/integrations/llms/edenai/", "C Transformers": "https://python.langchain.com/v0.2/docs/integrations/llms/ctransformers/", "ExLlamaV2": "https://python.langchain.com/v0.2/docs/integrations/llms/exllamav2/", "Huggingface Endpoints": "https://python.langchain.com/v0.2/docs/integrations/llms/huggingface_endpoint/", "Replicate": "https://python.langchain.com/v0.2/docs/integrations/llms/replicate/"}, "LlamaCpp": {"Run LLMs locally": "https://python.langchain.com/v0.2/docs/how_to/local_llms/", "Llama2Chat": "https://python.langchain.com/v0.2/docs/integrations/chat/llama2_chat/", "Llama.cpp": "https://python.langchain.com/v0.2/docs/integrations/llms/llamacpp/", "Build a Local RAG Application": "https://python.langchain.com/v0.2/docs/tutorials/local_rag/"}, "GPT4All": {"Run LLMs locally": "https://python.langchain.com/v0.2/docs/how_to/local_llms/", "PromptLayer": 
"https://python.langchain.com/v0.2/docs/integrations/callbacks/promptlayer/", "GPT4All": "https://python.langchain.com/v0.2/docs/integrations/llms/gpt4all/", "Build a Local RAG Application": "https://python.langchain.com/v0.2/docs/tutorials/local_rag/"}, "Llamafile": {"Run LLMs locally": "https://python.langchain.com/v0.2/docs/how_to/local_llms/", "Llamafile": "https://python.langchain.com/v0.2/docs/integrations/llms/llamafile/", "Build a Local RAG Application": "https://python.langchain.com/v0.2/docs/tutorials/local_rag/"}, "LLMChain": {"Run LLMs locally": "https://python.langchain.com/v0.2/docs/how_to/local_llms/", "Clarifai": "https://python.langchain.com/v0.2/docs/integrations/llms/clarifai/", "RePhraseQuery": "https://python.langchain.com/v0.2/docs/integrations/retrievers/re_phrase/", "Memorize": "https://python.langchain.com/v0.2/docs/integrations/tools/memorize/", "Zapier Natural Language Actions": "https://python.langchain.com/v0.2/docs/integrations/tools/zapier/", "Reddit Search ": "https://python.langchain.com/v0.2/docs/integrations/tools/reddit_search/", "Dall-E Image Generator": "https://python.langchain.com/v0.2/docs/integrations/tools/dalle_image_generator/", "Mot\u00f6rhead": "https://python.langchain.com/v0.2/docs/integrations/memory/motorhead_memory/", "Llama2Chat": "https://python.langchain.com/v0.2/docs/integrations/chat/llama2_chat/", "Context": "https://python.langchain.com/v0.2/docs/integrations/callbacks/context/", "SageMaker Tracking": "https://python.langchain.com/v0.2/docs/integrations/callbacks/sagemaker_tracking/", "Argilla": "https://python.langchain.com/v0.2/docs/integrations/callbacks/argilla/", "Comet": "https://python.langchain.com/v0.2/docs/integrations/providers/comet_tracking/", "Aim": "https://python.langchain.com/v0.2/docs/integrations/providers/aim_tracking/", "Weights & Biases": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracking/", "MLflow Deployments for LLMs": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow/", "MLflow AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_ai_gateway/", "Rebuff": "https://python.langchain.com/v0.2/docs/integrations/providers/rebuff/", "Prediction Guard": "https://python.langchain.com/v0.2/docs/integrations/llms/predictionguard/", "MLflow": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_tracking/", "Shale Protocol": "https://python.langchain.com/v0.2/docs/integrations/providers/shaleprotocol/", "Flyte": "https://python.langchain.com/v0.2/docs/integrations/providers/flyte/", "Ray Serve": "https://python.langchain.com/v0.2/docs/integrations/providers/ray_serve/", "Javelin AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/javelin_ai_gateway/", "Chat Over Documents with Vectara": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_chat/", "Yellowbrick": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/yellowbrick/", "Google Cloud Vertex AI Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/google_cloud_vertexai_rerank/", "Baseten": "https://python.langchain.com/v0.2/docs/integrations/llms/baseten/", "StochasticAI": "https://python.langchain.com/v0.2/docs/integrations/llms/stochasticai/", "Solar": "https://python.langchain.com/v0.2/docs/integrations/llms/solar/", "Bittensor": "https://python.langchain.com/v0.2/docs/integrations/llms/bittensor/", "IPEX-LLM": 
"https://python.langchain.com/v0.2/docs/integrations/llms/ipex_llm/", "Banana": "https://python.langchain.com/v0.2/docs/integrations/llms/banana/", "Alibaba Cloud PAI EAS": "https://python.langchain.com/v0.2/docs/integrations/llms/alibabacloud_pai_eas_endpoint/", "OpenLLM": "https://python.langchain.com/v0.2/docs/integrations/llms/openllm/", "OctoAI": "https://python.langchain.com/v0.2/docs/integrations/llms/octoai/", "Writer": "https://python.langchain.com/v0.2/docs/integrations/llms/writer/", "Modal": "https://python.langchain.com/v0.2/docs/integrations/llms/modal/", "TextGen": "https://python.langchain.com/v0.2/docs/integrations/llms/textgen/", "Xorbits Inference (Xinference)": "https://python.langchain.com/v0.2/docs/integrations/llms/xinference/", "Nebula (Symbl.ai)": "https://python.langchain.com/v0.2/docs/integrations/llms/symblai_nebula/", "DeepInfra": "https://python.langchain.com/v0.2/docs/integrations/llms/deepinfra/", "NLP Cloud": "https://python.langchain.com/v0.2/docs/integrations/llms/nlpcloud/", "GPT4All": "https://python.langchain.com/v0.2/docs/integrations/llms/gpt4all/", "ForefrontAI": "https://python.langchain.com/v0.2/docs/integrations/llms/forefrontai/", "MosaicML": "https://python.langchain.com/v0.2/docs/integrations/llms/mosaicml/", "CerebriumAI": "https://python.langchain.com/v0.2/docs/integrations/llms/cerebriumai/", "Predibase": "https://python.langchain.com/v0.2/docs/integrations/llms/predibase/", "GigaChat": "https://python.langchain.com/v0.2/docs/integrations/llms/gigachat/", "# Oracle Cloud Infrastructure Generative AI": "https://python.langchain.com/v0.2/docs/integrations/llms/oci_generative_ai/", "OpaquePrompts": "https://python.langchain.com/v0.2/docs/integrations/llms/opaqueprompts/", "Aphrodite Engine": "https://python.langchain.com/v0.2/docs/integrations/llms/aphrodite/", "Eden AI": "https://python.langchain.com/v0.2/docs/integrations/llms/edenai/", "Javelin AI Gateway Tutorial": "https://python.langchain.com/v0.2/docs/integrations/llms/javelin/", "C Transformers": "https://python.langchain.com/v0.2/docs/integrations/llms/ctransformers/", "vLLM": "https://python.langchain.com/v0.2/docs/integrations/llms/vllm/", "Azure ML": "https://python.langchain.com/v0.2/docs/integrations/llms/azure_ml/", "Minimax": "https://python.langchain.com/v0.2/docs/integrations/llms/minimax/", "Yuan2.0": "https://python.langchain.com/v0.2/docs/integrations/llms/yuan2/", "Huggingface Endpoints": "https://python.langchain.com/v0.2/docs/integrations/llms/huggingface_endpoint/", "Runhouse": "https://python.langchain.com/v0.2/docs/integrations/llms/runhouse/", "Anyscale": "https://python.langchain.com/v0.2/docs/integrations/llms/anyscale/", "YandexGPT": "https://python.langchain.com/v0.2/docs/integrations/llms/yandex/", "GooseAI": "https://python.langchain.com/v0.2/docs/integrations/llms/gooseai/", "OpenLM": "https://python.langchain.com/v0.2/docs/integrations/llms/openlm/", "Cloudflare Workers AI": "https://python.langchain.com/v0.2/docs/integrations/llms/cloudflare_workersai/", "CTranslate2": "https://python.langchain.com/v0.2/docs/integrations/llms/ctranslate2/", "PipelineAI": "https://python.langchain.com/v0.2/docs/integrations/llms/pipelineai/", "ChatGLM": "https://python.langchain.com/v0.2/docs/integrations/llms/chatglm/", "Gradient": "https://python.langchain.com/v0.2/docs/integrations/llms/gradient/", "Petals": "https://python.langchain.com/v0.2/docs/integrations/llms/petals/", "Replicate": "https://python.langchain.com/v0.2/docs/integrations/llms/replicate/", "Summarize 
Text": "https://python.langchain.com/v0.2/docs/tutorials/summarization/"}, "ConditionalPromptSelector": {"Run LLMs locally": "https://python.langchain.com/v0.2/docs/how_to/local_llms/"}, "HubRunnable": {"How to configure runtime chain internals": "https://python.langchain.com/v0.2/docs/how_to/configure/"}, "ContextualCompressionRetriever": {"How to do retrieval with contextual compression": "https://python.langchain.com/v0.2/docs/how_to/contextual_compression/", "LLMLingua Document Compressor": "https://python.langchain.com/v0.2/docs/integrations/retrievers/llmlingua/", "Cohere reranker": "https://python.langchain.com/v0.2/docs/integrations/retrievers/cohere-reranker/", "FlashRank reranker": "https://python.langchain.com/v0.2/docs/integrations/retrievers/flashrank-reranker/", "LOTR (Merger Retriever)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/merger_retriever/", "UpTrain": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "RAGatouille": "https://python.langchain.com/v0.2/docs/integrations/providers/ragatouille/", "VoyageAI Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/voyageai-reranker/", "RankLLM Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/rankllm-reranker/", "OpenVINO Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/openvino_rerank/", "Google Cloud Vertex AI Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/google_cloud_vertexai_rerank/", "Cross Encoder Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/cross_encoder_reranker/", "Jina Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/jina_rerank/"}, "LLMChainExtractor": {"How to do retrieval with contextual compression": "https://python.langchain.com/v0.2/docs/how_to/contextual_compression/"}, "LLMChainFilter": {"How to do retrieval with contextual compression": "https://python.langchain.com/v0.2/docs/how_to/contextual_compression/"}, "EmbeddingsFilter": {"How to do retrieval with contextual compression": "https://python.langchain.com/v0.2/docs/how_to/contextual_compression/", "How to get a RAG application to add citations": "https://python.langchain.com/v0.2/docs/how_to/qa_citations/"}, "DocumentCompressorPipeline": {"How to do retrieval with contextual compression": "https://python.langchain.com/v0.2/docs/how_to/contextual_compression/"}, "EmbeddingsRedundantFilter": {"How to do retrieval with contextual compression": "https://python.langchain.com/v0.2/docs/how_to/contextual_compression/", "LOTR (Merger Retriever)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/merger_retriever/"}, "Comparator": {"How to construct filters for query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_constructing_filters/"}, "Comparison": {"How to construct filters for query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_constructing_filters/"}, "Operation": {"How to construct filters for query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_constructing_filters/"}, "Operator": {"How to construct filters for query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_constructing_filters/"}, "StructuredQuery": {"How to construct filters for query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_constructing_filters/"}, "ChromaTranslator": {"How to construct filters for query analysis": 
"https://python.langchain.com/v0.2/docs/how_to/query_constructing_filters/", "How to do \"self-querying\" retrieval": "https://python.langchain.com/v0.2/docs/how_to/self_query/"}, "ElasticsearchTranslator": {"How to construct filters for query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_constructing_filters/"}, "WikipediaQueryRun": {"How to use built-in tools and toolkits": "https://python.langchain.com/v0.2/docs/how_to/tools_builtin/", "Wikipedia": "https://python.langchain.com/v0.2/docs/integrations/tools/wikipedia/"}, "WikipediaAPIWrapper": {"How to use built-in tools and toolkits": "https://python.langchain.com/v0.2/docs/how_to/tools_builtin/", "Wikipedia": "https://python.langchain.com/v0.2/docs/integrations/tools/wikipedia/", "Zep Open Source Memory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory/", "Zep Cloud Memory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory_cloud/"}, "CallbackManagerForRetrieverRun": {"How to create a custom Retriever": "https://python.langchain.com/v0.2/docs/how_to/custom_retriever/", "How to add scores to retriever results": "https://python.langchain.com/v0.2/docs/how_to/add_scores_retriever/"}, "BaseRetriever": {"How to create a custom Retriever": "https://python.langchain.com/v0.2/docs/how_to/custom_retriever/"}, "LLMGraphTransformer": {"How to construct knowledge graphs": "https://python.langchain.com/v0.2/docs/how_to/graph_constructing/"}, "RetryOutputParser": {"How to retry when a parsing error occurs": "https://python.langchain.com/v0.2/docs/how_to/output_parser_retry/"}, "TimeWeightedVectorStoreRetriever": {"How to use a time-weighted vector store retriever": "https://python.langchain.com/v0.2/docs/how_to/time_weighted_vectorstore/"}, "InMemoryDocstore": {"How to use a time-weighted vector store retriever": "https://python.langchain.com/v0.2/docs/how_to/time_weighted_vectorstore/", "Annoy": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/annoy/"}, "mock_now": {"How to use a time-weighted vector store retriever": "https://python.langchain.com/v0.2/docs/how_to/time_weighted_vectorstore/"}, "RunnableGenerator": {"How to create a custom Output Parser": "https://python.langchain.com/v0.2/docs/how_to/output_parser_custom/"}, "OutputParserException": {"How to create a custom Output Parser": "https://python.langchain.com/v0.2/docs/how_to/output_parser_custom/"}, "BaseOutputParser": {"How to create a custom Output Parser": "https://python.langchain.com/v0.2/docs/how_to/output_parser_custom/", "How to use the MultiQueryRetriever": "https://python.langchain.com/v0.2/docs/how_to/MultiQueryRetriever/"}, "BaseGenerationOutputParser": {"How to create a custom Output Parser": "https://python.langchain.com/v0.2/docs/how_to/output_parser_custom/"}, "Generation": {"How to create a custom Output Parser": "https://python.langchain.com/v0.2/docs/how_to/output_parser_custom/"}, "DirectoryLoader": {"How to load documents from a directory": "https://python.langchain.com/v0.2/docs/how_to/document_loader_directory/", "Azure AI Search": "https://python.langchain.com/v0.2/docs/integrations/retrievers/azure_ai_search/", "Apache Doris": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/apache_doris/", "StarRocks": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/starrocks/"}, "PythonLoader": {"How to load documents from a directory": "https://python.langchain.com/v0.2/docs/how_to/document_loader_directory/"}, "LanceDB": {"How to create and query vector stores": 
"https://python.langchain.com/v0.2/docs/how_to/vectorstores/", "LanceDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/lancedb/"}, "SpacyTextSplitter": {"How to split text by tokens ": "https://python.langchain.com/v0.2/docs/how_to/split_by_token/", "spaCy": "https://python.langchain.com/v0.2/docs/integrations/providers/spacy/", "Atlas": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/atlas/"}, "SentenceTransformersTokenTextSplitter": {"How to split text by tokens ": "https://python.langchain.com/v0.2/docs/how_to/split_by_token/"}, "NLTKTextSplitter": {"How to split text by tokens ": "https://python.langchain.com/v0.2/docs/how_to/split_by_token/"}, "KonlpyTextSplitter": {"How to split text by tokens ": "https://python.langchain.com/v0.2/docs/how_to/split_by_token/"}, "WikipediaRetriever": {"How to get a RAG application to add citations": "https://python.langchain.com/v0.2/docs/how_to/qa_citations/", "Wikipedia": "https://python.langchain.com/v0.2/docs/integrations/providers/wikipedia/"}, "UnstructuredHTMLLoader": {"How to load HTML": "https://python.langchain.com/v0.2/docs/how_to/document_loader_html/", "Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/"}, "MultiQueryRetriever": {"How to use the MultiQueryRetriever": "https://python.langchain.com/v0.2/docs/how_to/MultiQueryRetriever/", "UpTrain": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "Vectara": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_summary/"}, "GraphCypherQAChain": {"How to best prompt for Graph-RAG": "https://python.langchain.com/v0.2/docs/how_to/graph_prompting/", "Neo4j": "https://python.langchain.com/v0.2/docs/integrations/graphs/neo4j_cypher/", "Memgraph": "https://python.langchain.com/v0.2/docs/integrations/graphs/memgraph/", "Diffbot": "https://python.langchain.com/v0.2/docs/integrations/graphs/diffbot/", "Apache AGE": "https://python.langchain.com/v0.2/docs/integrations/graphs/apache_age/", "Build a Question Answering application over a Graph Database": "https://python.langchain.com/v0.2/docs/tutorials/graph/"}, "Neo4jVector": {"How to best prompt for Graph-RAG": "https://python.langchain.com/v0.2/docs/how_to/graph_prompting/", "Neo4j": "https://python.langchain.com/v0.2/docs/integrations/providers/neo4j/", "Neo4j Vector Index": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/neo4jvector/"}, "ParentDocumentRetriever": {"How to use the Parent Document Retriever": "https://python.langchain.com/v0.2/docs/how_to/parent_document_retriever/"}, "InMemoryStore": {"How to use the Parent Document Retriever": "https://python.langchain.com/v0.2/docs/how_to/parent_document_retriever/", "How to add scores to retriever results": "https://python.langchain.com/v0.2/docs/how_to/add_scores_retriever/", "Fleet AI Context": "https://python.langchain.com/v0.2/docs/integrations/retrievers/fleet_context/", "Docugami": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/docugami/"}, "YamlOutputParser": {"How to parse YAML output": "https://python.langchain.com/v0.2/docs/how_to/output_parser_yaml/"}, "PipelinePromptTemplate": {"How to compose prompts together": "https://python.langchain.com/v0.2/docs/how_to/prompts_composition/"}, "CacheBackedEmbeddings": {"Caching": "https://python.langchain.com/v0.2/docs/how_to/caching_embeddings/", "Astra DB": "https://python.langchain.com/v0.2/docs/integrations/stores/astradb/", "Cassandra": 
"https://python.langchain.com/v0.2/docs/integrations/stores/cassandra/"}, "LocalFileStore": {"Caching": "https://python.langchain.com/v0.2/docs/how_to/caching_embeddings/", "LocalFileStore": "https://python.langchain.com/v0.2/docs/integrations/stores/file_system/"}, "render_text_description": {"How to add ad-hoc tool calling capability to LLMs and Chat Models": "https://python.langchain.com/v0.2/docs/how_to/tools_prompting/", "Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/chat/huggingface/", "MLX": "https://python.langchain.com/v0.2/docs/integrations/chat/mlx/"}, "RunnableSerializable": {"LangChain Expression Language Cheatsheet": "https://python.langchain.com/v0.2/docs/how_to/lcel_cheatsheet/"}, "Run": {"LangChain Expression Language Cheatsheet": "https://python.langchain.com/v0.2/docs/how_to/lcel_cheatsheet/"}, "MarkdownHeaderTextSplitter": {"How to split Markdown by Headers": "https://python.langchain.com/v0.2/docs/how_to/markdown_header_metadata_splitter/"}, "HTMLHeaderTextSplitter": {"How to split by HTML header ": "https://python.langchain.com/v0.2/docs/how_to/HTML_header_metadata_splitter/"}, "EnsembleRetriever": {"How to combine results from multiple retrievers": "https://python.langchain.com/v0.2/docs/how_to/ensemble_retriever/"}, "BM25Retriever": {"How to combine results from multiple retrievers": "https://python.langchain.com/v0.2/docs/how_to/ensemble_retriever/", "BM25": "https://python.langchain.com/v0.2/docs/integrations/retrievers/bm25/", "Maritalk": "https://python.langchain.com/v0.2/docs/integrations/chat/maritalk/"}, "ChatVertexAI": {"Response metadata": "https://python.langchain.com/v0.2/docs/how_to/response_metadata/", "Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "Google SQL for MySQL": "https://python.langchain.com/v0.2/docs/integrations/memory/google_sql_mysql/", "Google AlloyDB for PostgreSQL": "https://python.langchain.com/v0.2/docs/integrations/memory/google_alloydb/", "Google El Carro Oracle": "https://python.langchain.com/v0.2/docs/integrations/memory/google_el_carro/", "Google SQL for PostgreSQL": "https://python.langchain.com/v0.2/docs/integrations/memory/google_sql_pg/", "Google SQL for SQL Server": "https://python.langchain.com/v0.2/docs/integrations/memory/google_sql_mssql/", "ChatVertexAI": "https://python.langchain.com/v0.2/docs/integrations/chat/google_vertex_ai_palm/", "Google Cloud Vertex AI": "https://python.langchain.com/v0.2/docs/integrations/llms/google_vertex_ai_palm/"}, "ChatMistralAI": {"Response metadata": "https://python.langchain.com/v0.2/docs/how_to/response_metadata/", "MistralAI": "https://python.langchain.com/v0.2/docs/integrations/providers/mistralai/", "Build an Extraction Chain": "https://python.langchain.com/v0.2/docs/tutorials/extraction/"}, "ChatGroq": {"Response metadata": "https://python.langchain.com/v0.2/docs/how_to/response_metadata/", "Groq": "https://python.langchain.com/v0.2/docs/integrations/chat/groq/"}, "ChatFireworks": {"Response metadata": "https://python.langchain.com/v0.2/docs/how_to/response_metadata/", "ChatFireworks": "https://python.langchain.com/v0.2/docs/integrations/chat/fireworks/"}, "set_verbose": {"How to debug your LLM apps": "https://python.langchain.com/v0.2/docs/how_to/debugging/", "OpaquePrompts": "https://python.langchain.com/v0.2/docs/integrations/llms/opaqueprompts/"}, "set_debug": {"How to debug your LLM apps": "https://python.langchain.com/v0.2/docs/how_to/debugging/", "Document Comparison": 
"https://python.langchain.com/v0.2/docs/integrations/toolkits/document_comparison_toolkit/", "Bittensor": "https://python.langchain.com/v0.2/docs/integrations/llms/bittensor/", "TextGen": "https://python.langchain.com/v0.2/docs/integrations/llms/textgen/", "OpaquePrompts": "https://python.langchain.com/v0.2/docs/integrations/llms/opaqueprompts/"}, "MaxMarginalRelevanceExampleSelector": {"How to select examples by maximal marginal relevance (MMR)": "https://python.langchain.com/v0.2/docs/how_to/example_selectors_mmr/"}, "AttributeInfo": {"How to do \"self-querying\" retrieval": "https://python.langchain.com/v0.2/docs/how_to/self_query/", "How to add scores to retriever results": "https://python.langchain.com/v0.2/docs/how_to/add_scores_retriever/", "Milvus": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/milvus_self_query/", "PGVector (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/pgvector_self_query/", "Weaviate": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/weaviate_self_query/", "Vectara ": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/vectara_self_query/", "DashVector": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/dashvector/", "Databricks Vector Search": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/databricks_vector_search/", "DingoDB": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/dingo/", "OpenSearch": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/opensearch_self_query/", "Elasticsearch": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/elasticsearch_self_query/", "Chroma": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/chroma_self_query/", "Tencent Cloud VectorDB": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/tencentvectordb/", "Timescale Vector (Postgres) ": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/timescalevector_self_query/", "Astra DB (Cassandra)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/astradb/", "Pinecone": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/pinecone/", "Supabase (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/supabase_self_query/", "Redis": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/redis_self_query/", "MyScale": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/myscale_self_query/", "Deep Lake": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/activeloop_deeplake_self_query/", "MongoDB Atlas": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/mongodb_atlas/", "Qdrant": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/qdrant_self_query/", "Timescale Vector (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/timescalevector/", "Docugami": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/docugami/", "self-query-qdrant": "https://python.langchain.com/v0.2/docs/templates/self-query-qdrant/"}, "SelfQueryRetriever": {"How to do \"self-querying\" retrieval": "https://python.langchain.com/v0.2/docs/how_to/self_query/", "How to add scores to retriever results": "https://python.langchain.com/v0.2/docs/how_to/add_scores_retriever/", 
"Milvus": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/milvus_self_query/", "PGVector (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/pgvector_self_query/", "Weaviate": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/weaviate_self_query/", "Vectara ": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/vectara_self_query/", "DashVector": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/dashvector/", "Databricks Vector Search": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/databricks_vector_search/", "DingoDB": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/dingo/", "OpenSearch": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/opensearch_self_query/", "Elasticsearch": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/elasticsearch_self_query/", "Chroma": "https://python.langchain.com/v0.2/docs/integrations/providers/chroma/", "Tencent Cloud VectorDB": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/tencentvectordb/", "Timescale Vector (Postgres) ": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/timescalevector_self_query/", "Astra DB (Cassandra)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/astradb/", "Pinecone": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/pinecone/", "Supabase (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/supabase_self_query/", "Redis": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/redis_self_query/", "MyScale": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/myscale_self_query/", "Deep Lake": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/activeloop_deeplake_self_query/", "MongoDB Atlas": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/mongodb_atlas/", "Qdrant": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/qdrant_self_query/", "Astra DB": "https://python.langchain.com/v0.2/docs/integrations/providers/astradb/", "Timescale Vector (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/timescalevector/", "Docugami": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/docugami/"}, "StructuredQueryOutputParser": {"How to do \"self-querying\" retrieval": "https://python.langchain.com/v0.2/docs/how_to/self_query/"}, "get_query_constructor_prompt": {"How to do \"self-querying\" retrieval": "https://python.langchain.com/v0.2/docs/how_to/self_query/"}, "Cassandra": {"Hybrid Search": "https://python.langchain.com/v0.2/docs/how_to/hybrid/", "Cassandra": "https://python.langchain.com/v0.2/docs/integrations/providers/cassandra/", "Apache Cassandra": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/cassandra/"}, "HTMLSectionSplitter": {"How to split by HTML sections": "https://python.langchain.com/v0.2/docs/how_to/HTML_section_aware_splitter/"}, "JSONLoader": {"How to load JSON": "https://python.langchain.com/v0.2/docs/how_to/document_loader_json/", "Timescale Vector (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/timescalevector/"}, "UpstashRedisCache": {"Model caches": "https://python.langchain.com/v0.2/docs/integrations/llm_caching/", "Upstash 
Vector": "https://python.langchain.com/v0.2/docs/integrations/providers/upstash/"}, "RedisCache": {"Model caches": "https://python.langchain.com/v0.2/docs/integrations/llm_caching/", "Redis": "https://python.langchain.com/v0.2/docs/integrations/providers/redis/"}, "RedisSemanticCache": {"Model caches": "https://python.langchain.com/v0.2/docs/integrations/llm_caching/", "Redis": "https://python.langchain.com/v0.2/docs/integrations/providers/redis/"}, "GPTCache": {"Model caches": "https://python.langchain.com/v0.2/docs/integrations/llm_caching/"}, "MomentoCache": {"Model caches": "https://python.langchain.com/v0.2/docs/integrations/llm_caching/", "Momento": "https://python.langchain.com/v0.2/docs/integrations/providers/momento/"}, "SQLAlchemyCache": {"Model caches": "https://python.langchain.com/v0.2/docs/integrations/llm_caching/"}, "CassandraCache": {"Model caches": "https://python.langchain.com/v0.2/docs/integrations/llm_caching/", "Cassandra": "https://python.langchain.com/v0.2/docs/integrations/providers/cassandra/"}, "CassandraSemanticCache": {"Model caches": "https://python.langchain.com/v0.2/docs/integrations/llm_caching/", "Cassandra": "https://python.langchain.com/v0.2/docs/integrations/providers/cassandra/"}, "AzureCosmosDBSemanticCache": {"Model caches": "https://python.langchain.com/v0.2/docs/integrations/llm_caching/"}, "CosmosDBSimilarityType": {"Model caches": "https://python.langchain.com/v0.2/docs/integrations/llm_caching/", "Azure Cosmos DB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azure_cosmos_db/"}, "CosmosDBVectorSearchType": {"Model caches": "https://python.langchain.com/v0.2/docs/integrations/llm_caching/", "Azure Cosmos DB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azure_cosmos_db/"}, "load_summarize_chain": {"Model caches": "https://python.langchain.com/v0.2/docs/integrations/llm_caching/", "Infino": "https://python.langchain.com/v0.2/docs/integrations/callbacks/infino/", "LarkSuite (FeiShu)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/larksuite/", "Summarize Text": "https://python.langchain.com/v0.2/docs/tutorials/summarization/"}, "OpenSearchSemanticCache": {"Model caches": "https://python.langchain.com/v0.2/docs/integrations/llm_caching/"}, "map_ai_messages": {"WeChat": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/wechat/", "Facebook Messenger": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/facebook/", "GMail": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/gmail/", "Slack": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/slack/", "WhatsApp": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/whatsapp/", "iMessage": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/imessage/", "Telegram": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/telegram/", "Discord": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/discord/"}, "merge_chat_runs": {"WeChat": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/wechat/", "Facebook Messenger": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/facebook/", "Slack": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/slack/", "WhatsApp": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/whatsapp/", "iMessage": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/imessage/", "Telegram": 
"https://python.langchain.com/v0.2/docs/integrations/chat_loaders/telegram/", "Discord": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/discord/"}, "ChatSession": {"WeChat": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/wechat/", "Slack": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/slack/", "WhatsApp": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/whatsapp/", "iMessage": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/imessage/", "Telegram": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/telegram/", "Discord": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/discord/"}, "FolderFacebookMessengerChatLoader": {"Facebook Messenger": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/facebook/", "Facebook - Meta": "https://python.langchain.com/v0.2/docs/integrations/providers/facebook/"}, "SingleFileFacebookMessengerChatLoader": {"Facebook Messenger": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/facebook/", "Facebook - Meta": "https://python.langchain.com/v0.2/docs/integrations/providers/facebook/"}, "convert_messages_for_finetuning": {"Facebook Messenger": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/facebook/", "LangSmith LLM Runs": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/langsmith_llm_runs/", "LangSmith Chat Datasets": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/langsmith_dataset/", "iMessage": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/imessage/"}, "convert_message_to_dict": {"Twitter (via Apify)": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/twitter/"}, "convert_pydantic_to_openai_function": {"LangSmith LLM Runs": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/langsmith_llm_runs/"}, "PydanticOutputFunctionsParser": {"LangSmith LLM Runs": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/langsmith_llm_runs/"}, "LangSmithRunChatLoader": {"LangSmith LLM Runs": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/langsmith_llm_runs/"}, "GMailLoader": {"GMail": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/gmail/"}, "SlackChatLoader": {"Slack": "https://python.langchain.com/v0.2/docs/integrations/providers/slack/"}, "WhatsAppChatLoader": {"WhatsApp": "https://python.langchain.com/v0.2/docs/integrations/providers/whatsapp/", "Facebook - Meta": "https://python.langchain.com/v0.2/docs/integrations/providers/facebook/", "WhatsApp Chat": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/whatsapp_chat/"}, "LangSmithDatasetChatLoader": {"LangSmith Chat Datasets": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/langsmith_dataset/"}, "IMessageChatLoader": {"iMessage": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/imessage/"}, "TelegramChatLoader": {"Telegram": "https://python.langchain.com/v0.2/docs/integrations/providers/telegram/"}, "BookendEmbeddings": {"Bookend AI": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/bookend/"}, "SolarEmbeddings": {"Solar": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/solar/"}, "HuggingFaceBgeEmbeddings": {"BGE on Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/bge_huggingface/", "Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/platforms/huggingface/"}, 
"QuantizedBiEncoderEmbeddings": {"Embedding Documents using Optimized and Quantized Embedders": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/optimum_intel/", "Intel": "https://python.langchain.com/v0.2/docs/integrations/providers/intel/"}, "FireworksEmbeddings": {"FireworksEmbeddings": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/fireworks/"}, "XinferenceEmbeddings": {"Xorbits inference (Xinference)": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/xinference/"}, "LLMRailsEmbeddings": {"LLMRails": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/llm_rails/"}, "DeepInfraEmbeddings": {"DeepInfra": "https://python.langchain.com/v0.2/docs/integrations/providers/deepinfra/"}, "HuggingFaceInferenceAPIEmbeddings": {"Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/huggingfacehub/"}, "HuggingFaceEndpointEmbeddings": {"Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/huggingfacehub/", "Text Embeddings Inference": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/text_embeddings_inference/"}, "GoogleGenerativeAIEmbeddings": {"Google Generative AI Embeddings": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/google_generative_ai/", "Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/"}, "GPT4AllEmbeddings": {"GPT4All": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/gpt4all/", "ManticoreSearch VectorStore": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/manticore_search/", "Build a Local RAG Application": "https://python.langchain.com/v0.2/docs/tutorials/local_rag/"}, "MosaicMLInstructorEmbeddings": {"MosaicML": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/mosaicml/"}, "QuantizedBgeEmbeddings": {"Intel\u00ae Extension for Transformers Quantized Text Embeddings": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/itrex/", "Intel": "https://python.langchain.com/v0.2/docs/integrations/providers/intel/"}, "VertexAIEmbeddings": {"Google Vertex AI PaLM ": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/google_vertex_ai_palm/", "Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "Google Cloud SQL for MySQL": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_cloud_sql_mysql/", "Google Vertex AI Vector Search": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_vertex_ai_vector_search/", "Google AlloyDB for PostgreSQL": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_alloydb/", "Google Firestore (Native Mode)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_firestore/", "Google BigQuery Vector Search": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_bigquery_vector_search/", "Google Spanner": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_spanner/", "Google Cloud SQL for PostgreSQL": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_cloud_sql_pg/", "Google Cloud Vertex AI Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/google_cloud_vertexai_rerank/"}, "BedrockEmbeddings": {"Bedrock": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/bedrock/", "AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/"}, "GigaChatEmbeddings": 
{"GigaChat": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/gigachat/", "Salute Devices": "https://python.langchain.com/v0.2/docs/integrations/providers/salute_devices/"}, "OllamaEmbeddings": {"Ollama": "https://python.langchain.com/v0.2/docs/integrations/providers/ollama/"}, "OCIGenAIEmbeddings": {"Oracle Cloud Infrastructure Generative AI": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/oci_generative_ai/", "Oracle Cloud Infrastructure (OCI)": "https://python.langchain.com/v0.2/docs/integrations/providers/oci/", "# Oracle Cloud Infrastructure Generative AI": "https://python.langchain.com/v0.2/docs/integrations/llms/oci_generative_ai/"}, "FastEmbedEmbeddings": {"FastEmbed by Qdrant": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/fastembed/"}, "LlamaCppEmbeddings": {"Llama-cpp": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/llamacpp/", "Llama.cpp": "https://python.langchain.com/v0.2/docs/integrations/providers/llamacpp/"}, "NLPCloudEmbeddings": {"NLP Cloud": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/nlp_cloud/", "NLPCloud": "https://python.langchain.com/v0.2/docs/integrations/providers/nlpcloud/"}, "LaserEmbeddings": {"LASER Language-Agnostic SEntence Representations Embeddings by Meta AI": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/laser/", "Facebook - Meta": "https://python.langchain.com/v0.2/docs/integrations/providers/facebook/"}, "OpenCLIPEmbeddings": {"OpenClip": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/open_clip/", "SingleStoreDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/singlestoredb/"}, "TitanTakeoffEmbed": {"Titan Takeoff": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/titan_takeoff/"}, "MistralAIEmbeddings": {"MistralAI": "https://python.langchain.com/v0.2/docs/integrations/providers/mistralai/"}, "SpacyEmbeddings": {"SpaCy": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/spacy_embedding/", "spaCy": "https://python.langchain.com/v0.2/docs/integrations/providers/spacy/"}, "BaichuanTextEmbeddings": {"Baichuan Text Embeddings": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/baichuan/", "Baichuan": "https://python.langchain.com/v0.2/docs/integrations/providers/baichuan/"}, "TogetherEmbeddings": {"TogetherEmbeddings": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/together/"}, "HuggingFaceInstructEmbeddings": {"Instruct Embeddings on Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/instruct_embeddings/", "Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/platforms/huggingface/"}, "OracleEmbeddings": {"Oracle AI Vector Search: Generate Embeddings": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/oracleai/", "OracleAI Vector Search": "https://python.langchain.com/v0.2/docs/integrations/providers/oracleai/"}, "QianfanEmbeddingsEndpoint": {"Baidu Qianfan": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/baidu_qianfan_endpoint/", "ERNIE": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/ernie/", "Baidu": "https://python.langchain.com/v0.2/docs/integrations/providers/baidu/", "Baidu Cloud ElasticSearch VectorSearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/baiducloud_vector_search/"}, "EdenAiEmbeddings": {"EDEN AI": 
"https://python.langchain.com/v0.2/docs/integrations/text_embedding/edenai/", "Eden AI": "https://python.langchain.com/v0.2/docs/integrations/providers/edenai/"}, "JohnSnowLabsEmbeddings": {"John Snow Labs": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/johnsnowlabs_embedding/"}, "ErnieEmbeddings": {"ERNIE": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/ernie/"}, "ClarifaiEmbeddings": {"Clarifai": "https://python.langchain.com/v0.2/docs/integrations/providers/clarifai/"}, "AzureOpenAIEmbeddings": {"Azure OpenAI": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/azureopenai/", "Azure AI Search": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azuresearch/", "Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/"}, "InfinityEmbeddings": {"Infinity": "https://python.langchain.com/v0.2/docs/integrations/providers/infinity/"}, "InfinityEmbeddingsLocal": {"Infinity": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/infinity/"}, "AwaEmbeddings": {"AwaDB": "https://python.langchain.com/v0.2/docs/integrations/providers/awadb/"}, "VolcanoEmbeddings": {"Volc Engine": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/volcengine/"}, "MiniMaxEmbeddings": {"MiniMax": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/minimax/", "Minimax": "https://python.langchain.com/v0.2/docs/integrations/providers/minimax/"}, "FakeEmbeddings": {"Fake Embeddings": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/fake/", "DocArray": "https://python.langchain.com/v0.2/docs/integrations/retrievers/docarray_retriever/", "Vectara ": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/vectara_self_query/", "Vectara": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vectara/", "Relyt": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/relyt/", "Tair": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tair/", "Tencent Cloud VectorDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tencentvectordb/", "Google Memorystore for Redis": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_memorystore_redis/", "PGVecto.rs": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pgvecto_rs/", "Baidu VectorDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/baiduvectordb/"}, "ClovaEmbeddings": {"Clova Embeddings": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/clova/"}, "NeMoEmbeddings": {"NVIDIA NeMo embeddings": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/nemo/"}, "SparkLLMTextEmbeddings": {"SparkLLM Text Embeddings": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/sparkllm/"}, "PremAIEmbeddings": {"PremAI": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/premai/"}, "KNNRetriever": {"Voyage AI": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/voyageai/", "kNN": "https://python.langchain.com/v0.2/docs/integrations/retrievers/knn/"}, "SelfHostedEmbeddings": {"Self Hosted": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/self-hosted/"}, "SelfHostedHuggingFaceEmbeddings": {"Self Hosted": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/self-hosted/"}, "SelfHostedHuggingFaceInstructEmbeddings": {"Self Hosted": 
"https://python.langchain.com/v0.2/docs/integrations/text_embedding/self-hosted/"}, "AnyscaleEmbeddings": {"Anyscale": "https://python.langchain.com/v0.2/docs/integrations/providers/anyscale/"}, "EmbaasEmbeddings": {"Embaas": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/embaas/"}, "YandexGPTEmbeddings": {"YandexGPT": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/yandex/"}, "JinaEmbeddings": {"Jina": "https://python.langchain.com/v0.2/docs/integrations/providers/jina/", "Jina Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/jina_rerank/"}, "AlephAlphaAsymmetricSemanticEmbedding": {"Aleph Alpha": "https://python.langchain.com/v0.2/docs/integrations/providers/aleph_alpha/"}, "AlephAlphaSymmetricSemanticEmbedding": {"Aleph Alpha": "https://python.langchain.com/v0.2/docs/integrations/providers/aleph_alpha/"}, "CloudflareWorkersAIEmbeddings": {"Cloudflare Workers AI": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/cloudflare_workersai/", "Cloudflare": "https://python.langchain.com/v0.2/docs/integrations/providers/cloudflare/"}, "DashScopeEmbeddings": {"DashScope": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/dashscope/", "DashVector": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/dashvector/"}, "TensorflowHubEmbeddings": {"TensorFlow Hub": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/tensorflowhub/"}, "LlamafileEmbeddings": {"llamafile": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/llamafile/"}, "GradientEmbeddings": {"Gradient": "https://python.langchain.com/v0.2/docs/integrations/providers/gradient/"}, "ModelScopeEmbeddings": {"ModelScope": "https://python.langchain.com/v0.2/docs/integrations/providers/modelscope/"}, "SagemakerEndpointEmbeddings": {"SageMaker": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/sagemaker-endpoint/", "AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/"}, "EmbeddingsContentHandler": {"SageMaker": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/sagemaker-endpoint/"}, "DocArrayInMemorySearch": {"UpstageEmbeddings": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/upstage/", "DocArray InMemorySearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/docarray_in_memory/"}, "SambaStudioEmbeddings": {"SambaNova": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/sambanova/"}, "OpenVINOEmbeddings": {"OpenVINO": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/openvino/", "OpenVINO Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/openvino_rerank/"}, "OpenVINOBgeEmbeddings": {"OpenVINO": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/openvino/"}, "LocalAIEmbeddings": {"LocalAI": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/localai/"}, "AzureAISearchRetriever": {"Azure AI Search": "https://python.langchain.com/v0.2/docs/integrations/retrievers/azure_ai_search/", "Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/"}, "AzureSearch": {"Azure AI Search": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azuresearch/"}, "RePhraseQueryRetriever": {"RePhraseQuery": "https://python.langchain.com/v0.2/docs/integrations/retrievers/re_phrase/"}, "YouSearchAPIWrapper": {"You.com": 
"https://python.langchain.com/v0.2/docs/integrations/retrievers/you-retriever/", "You.com Search": "https://python.langchain.com/v0.2/docs/integrations/tools/you/"}, "YouRetriever": {"You.com": "https://python.langchain.com/v0.2/docs/integrations/retrievers/you-retriever/"}, "Kinetica": {"Kinetica Vectorstore based Retriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/kinetica/", "Kinetica": "https://python.langchain.com/v0.2/docs/integrations/providers/kinetica/", "Kinetica Vectorstore API": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/kinetica/"}, "KineticaSettings": {"Kinetica Vectorstore based Retriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/kinetica/", "Kinetica Vectorstore API": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/kinetica/", "Kinetica": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/kinetica/"}, "Jaguar": {"JaguarDB Vector Database": "https://python.langchain.com/v0.2/docs/integrations/retrievers/jaguar/", "Jaguar": "https://python.langchain.com/v0.2/docs/integrations/providers/jaguar/", "Jaguar Vector Database": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/jaguar/"}, "BaseStore": {"Fleet AI Context": "https://python.langchain.com/v0.2/docs/integrations/retrievers/fleet_context/"}, "VectorStore": {"Fleet AI Context": "https://python.langchain.com/v0.2/docs/integrations/retrievers/fleet_context/"}, "AskNewsRetriever": {"AskNews": "https://python.langchain.com/v0.2/docs/integrations/retrievers/asknews/"}, "LLMLinguaCompressor": {"LLMLingua Document Compressor": "https://python.langchain.com/v0.2/docs/integrations/retrievers/llmlingua/"}, "RetrievalQA": {"LLMLingua Document Compressor": "https://python.langchain.com/v0.2/docs/integrations/retrievers/llmlingua/", "Bedrock (Knowledge Bases)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/bedrock/", "Cohere reranker": "https://python.langchain.com/v0.2/docs/integrations/retrievers/cohere-reranker/", "Activeloop Deep Memory": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "FlashRank reranker": "https://python.langchain.com/v0.2/docs/integrations/retrievers/flashrank-reranker/", "Confident": "https://python.langchain.com/v0.2/docs/integrations/callbacks/confident/", "UpTrain": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "Document Comparison": "https://python.langchain.com/v0.2/docs/integrations/toolkits/document_comparison_toolkit/", "Apache Doris": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/apache_doris/", "ScaNN": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/scann/", "Google Vertex AI Vector Search": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_vertex_ai_vector_search/", "Momento Vector Index (MVI)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/momento_vector_index/", "Activeloop Deep Lake": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/activeloop_deeplake/", "StarRocks": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/starrocks/", "KDB.AI": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/kdbai/", "Timescale Vector (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/timescalevector/", "Amazon Document DB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/documentdb/", "MongoDB Atlas": 
"https://python.langchain.com/v0.2/docs/integrations/vectorstores/mongodb_atlas/", "VoyageAI Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/voyageai-reranker/", "RankLLM Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/rankllm-reranker/", "YouTube audio": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/youtube_audio/", "Docugami": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/docugami/"}, "ElasticSearchBM25Retriever": {"ElasticSearch BM25": "https://python.langchain.com/v0.2/docs/integrations/retrievers/elastic_search_bm25/"}, "OutlineRetriever": {"Outline": "https://python.langchain.com/v0.2/docs/integrations/providers/outline/"}, "ConversationalRetrievalChain": {"Outline": "https://python.langchain.com/v0.2/docs/integrations/retrievers/outline/", "Wikipedia": "https://python.langchain.com/v0.2/docs/integrations/retrievers/wikipedia/", "SEC filing": "https://python.langchain.com/v0.2/docs/integrations/retrievers/sec_filings/", "Arxiv": "https://python.langchain.com/v0.2/docs/integrations/retrievers/arxiv/", "Rememberizer": "https://python.langchain.com/v0.2/docs/integrations/retrievers/rememberizer/", "Kay.ai": "https://python.langchain.com/v0.2/docs/integrations/retrievers/kay/", "Vectara ": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/vectara_self_query/", "Chat Over Documents with Vectara": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_chat/", "SAP HANA Cloud Vector Engine": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sap_hanavector/"}, "ZepMemory": {"Zep Open Source": "https://python.langchain.com/v0.2/docs/integrations/retrievers/zep_memorystore/", "Zep Open Source Memory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory/"}, "SearchScope": {"Zep Open Source": "https://python.langchain.com/v0.2/docs/integrations/retrievers/zep_memorystore/"}, "ZepRetriever": {"Zep Open Source": "https://python.langchain.com/v0.2/docs/integrations/retrievers/zep_memorystore/", "Zep Open Source Memory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory/"}, "VespaRetriever": {"Vespa": "https://python.langchain.com/v0.2/docs/integrations/providers/vespa/"}, "AmazonKendraRetriever": {"Amazon Kendra": "https://python.langchain.com/v0.2/docs/integrations/retrievers/amazon_kendra_retriever/"}, "AmazonKnowledgeBasesRetriever": {"Bedrock (Knowledge Bases)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/bedrock/"}, "Bedrock": {"Bedrock (Knowledge Bases)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/bedrock/", "Bedrock": "https://python.langchain.com/v0.2/docs/integrations/llms/bedrock/"}, "CohereEmbeddings": {"Cohere reranker": "https://python.langchain.com/v0.2/docs/integrations/retrievers/cohere-reranker/"}, "Cohere": {"Cohere reranker": "https://python.langchain.com/v0.2/docs/integrations/retrievers/cohere-reranker/", "self-query-qdrant": "https://python.langchain.com/v0.2/docs/templates/self-query-qdrant/"}, "ZepCloudMemory": {"Zep Cloud": "https://python.langchain.com/v0.2/docs/integrations/retrievers/zep_cloud_memorystore/", "ZepCloudChatMessageHistory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_cloud_chat_message_history/", "Zep Cloud Memory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory_cloud/"}, "ZepCloudRetriever": {"Zep Cloud": 
"https://python.langchain.com/v0.2/docs/integrations/retrievers/zep_cloud_memorystore/", "Zep Cloud Memory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory_cloud/", "Zep": "https://python.langchain.com/v0.2/docs/integrations/providers/zep/"}, "NeuralDBRetriever": {"**NeuralDB**": "https://python.langchain.com/v0.2/docs/integrations/retrievers/thirdai_neuraldb/"}, "SingleStoreDB": {"SingleStoreDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/singlestoredb/"}, "MetalRetriever": {"Metal": "https://python.langchain.com/v0.2/docs/integrations/providers/metal/"}, "BreebsRetriever": {"BREEBS (Open Knowledge)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/breebs/"}, "ChatGPTPluginRetriever": {"ChatGPT plugin": "https://python.langchain.com/v0.2/docs/integrations/retrievers/chatgpt-plugin/", "OpenAI": "https://python.langchain.com/v0.2/docs/integrations/platforms/openai/"}, "KayAiRetriever": {"SEC filing": "https://python.langchain.com/v0.2/docs/integrations/retrievers/sec_filings/", "Kay.ai": "https://python.langchain.com/v0.2/docs/integrations/retrievers/kay/"}, "CohereRagRetriever": {"Cohere RAG": "https://python.langchain.com/v0.2/docs/integrations/retrievers/cohere/", "Cohere": "https://python.langchain.com/v0.2/docs/integrations/providers/cohere/"}, "DriaRetriever": {"Dria": "https://python.langchain.com/v0.2/docs/integrations/retrievers/dria_index/"}, "DocArrayRetriever": {"DocArray": "https://python.langchain.com/v0.2/docs/integrations/retrievers/docarray_retriever/"}, "SVMRetriever": {"SVM": "https://python.langchain.com/v0.2/docs/integrations/retrievers/svm/"}, "TavilySearchAPIRetriever": {"Tavily Search API": "https://python.langchain.com/v0.2/docs/integrations/retrievers/tavily/"}, "PineconeHybridSearchRetriever": {"Pinecone Hybrid Search": "https://python.langchain.com/v0.2/docs/integrations/retrievers/pinecone_hybrid_search/", "Pinecone": "https://python.langchain.com/v0.2/docs/integrations/providers/pinecone/"}, "DeepLake": {"Activeloop Deep Memory": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "Deep Lake": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/activeloop_deeplake_self_query/", "Activeloop Deep Lake": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/activeloop_deeplake/"}, "AsyncHtmlLoader": {"Activeloop Deep Memory": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "HTML to text": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/html2text/", "Markdownify": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/markdownify/", "AsyncHtml": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/async_html/"}, "Html2TextTransformer": {"Activeloop Deep Memory": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "HTML to text": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/html2text/", "Async Chromium": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/async_chromium/"}, "create_structured_output_chain": {"Activeloop Deep Memory": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/"}, "HumanMessagePromptTemplate": {"Activeloop Deep Memory": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "JinaChat": "https://python.langchain.com/v0.2/docs/integrations/chat/jinachat/", "Llama2Chat": 
"https://python.langchain.com/v0.2/docs/integrations/chat/llama2_chat/", "vLLM Chat": "https://python.langchain.com/v0.2/docs/integrations/chat/vllm/", "Context": "https://python.langchain.com/v0.2/docs/integrations/callbacks/context/", "SQL Database": "https://python.langchain.com/v0.2/docs/integrations/toolkits/sql_database/", "Yellowbrick": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/yellowbrick/", "Figma": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/figma/"}, "PubMedRetriever": {"PubMed": "https://python.langchain.com/v0.2/docs/integrations/providers/pubmed/"}, "WeaviateHybridSearchRetriever": {"Weaviate Hybrid Search": "https://python.langchain.com/v0.2/docs/integrations/retrievers/weaviate-hybrid/"}, "EmbedchainRetriever": {"Embedchain": "https://python.langchain.com/v0.2/docs/integrations/retrievers/embedchain/"}, "ArxivRetriever": {"Arxiv": "https://python.langchain.com/v0.2/docs/integrations/providers/arxiv/"}, "QdrantSparseVectorRetriever": {"Qdrant Sparse Vector": "https://python.langchain.com/v0.2/docs/integrations/retrievers/qdrant-sparse/"}, "DeterministicFakeEmbedding": {"Elasticsearch": "https://python.langchain.com/v0.2/docs/integrations/retrievers/elasticsearch_retriever/"}, "Embeddings": {"Elasticsearch": "https://python.langchain.com/v0.2/docs/integrations/retrievers/elasticsearch_retriever/", "Infinispan": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/infinispanvs/"}, "RememberizerRetriever": {"Rememberizer": "https://python.langchain.com/v0.2/docs/integrations/retrievers/rememberizer/"}, "ArceeRetriever": {"Arcee": "https://python.langchain.com/v0.2/docs/integrations/providers/arcee/"}, "FlashrankRerank": {"FlashRank reranker": "https://python.langchain.com/v0.2/docs/integrations/retrievers/flashrank-reranker/", "UpTrain": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/"}, "ChaindeskRetriever": {"Chaindesk": "https://python.langchain.com/v0.2/docs/integrations/providers/chaindesk/"}, "MergerRetriever": {"LOTR (Merger Retriever)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/merger_retriever/"}, "EmbeddingsClusteringFilter": {"LOTR (Merger Retriever)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/merger_retriever/"}, "TFIDFRetriever": {"TF-IDF": "https://python.langchain.com/v0.2/docs/integrations/retrievers/tf_idf/"}, "GoogleVertexAIMultiTurnSearchRetriever": {"Google Vertex AI Search": "https://python.langchain.com/v0.2/docs/integrations/retrievers/google_vertex_ai_search/"}, "GoogleVertexAISearchRetriever": {"Google Vertex AI Search": "https://python.langchain.com/v0.2/docs/integrations/retrievers/google_vertex_ai_search/", "Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/"}, "PGVector": {"PGVector (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/pgvector_self_query/", "PGVector": "https://python.langchain.com/v0.2/docs/integrations/providers/pgvector/"}, "Weaviate": {"Weaviate": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/weaviate_self_query/"}, "Vectara": {"Vectara ": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/vectara_self_query/", "Vectara": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vectara/", "Chat Over Documents with Vectara": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_chat/"}, "DashVector": {"DashVector": 
"https://python.langchain.com/v0.2/docs/integrations/vectorstores/dashvector/"}, "Tongyi": {"DashVector": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/dashvector/", "Alibaba Cloud": "https://python.langchain.com/v0.2/docs/integrations/providers/alibaba_cloud/", "Tongyi Qwen": "https://python.langchain.com/v0.2/docs/integrations/llms/tongyi/"}, "DatabricksVectorSearch": {"Databricks Vector Search": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/databricks_vector_search/"}, "Dingo": {"DingoDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/dingo/"}, "OpenSearchVectorSearch": {"OpenSearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/opensearch/", "AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/"}, "ConnectionParams": {"Tencent Cloud VectorDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tencentvectordb/", "Baidu VectorDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/baiduvectordb/"}, "MetaField": {"Tencent Cloud VectorDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tencentvectordb/"}, "TencentVectorDB": {"Tencent Cloud VectorDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tencentvectordb/", "Tencent": "https://python.langchain.com/v0.2/docs/integrations/providers/tencent/"}, "TimescaleVector": {"Timescale Vector (Postgres) ": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/timescalevector_self_query/", "Timescale Vector (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/timescalevector/"}, "AstraDB": {"Astra DB (Cassandra)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/astradb/"}, "SupabaseVectorStore": {"Supabase (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/supabase/"}, "Redis": {"Redis": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/redis/"}, "MyScale": {"MyScale": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/myscale/"}, "MongoDBAtlasVectorSearch": {"MongoDB Atlas": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/mongodb_atlas/"}, "Qdrant": {"Qdrant": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/qdrant/"}, "AzureMLOnlineEndpoint": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "Azure ML": "https://python.langchain.com/v0.2/docs/integrations/llms/azure_ml/"}, "AzureOpenAI": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "OpenAI": "https://python.langchain.com/v0.2/docs/integrations/platforms/openai/", "Azure OpenAI": "https://python.langchain.com/v0.2/docs/integrations/llms/azure_openai/"}, "AzureChatOpenAI": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "OpenAI": "https://python.langchain.com/v0.2/docs/integrations/platforms/openai/", "AzureChatOpenAI": "https://python.langchain.com/v0.2/docs/integrations/chat/azure_chat_openai/", "Azure Cosmos DB for Apache Gremlin": "https://python.langchain.com/v0.2/docs/integrations/graphs/azure_cosmosdb_gremlin/"}, "AzureAIDataLoader": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "Azure AI Data": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/azure_ai_data/"}, "AzureBlobStorageContainerLoader": {"Microsoft": 
"https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "Azure Blob Storage Container": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/azure_blob_storage_container/"}, "AzureBlobStorageFileLoader": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "Azure Blob Storage File": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/azure_blob_storage_file/"}, "OneDriveLoader": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "Microsoft OneDrive": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/microsoft_onedrive/"}, "UnstructuredWordDocumentLoader": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "Microsoft Word": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/microsoft_word/"}, "UnstructuredExcelLoader": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "Microsoft Excel": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/microsoft_excel/"}, "SharePointLoader": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "Microsoft SharePoint": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/microsoft_sharepoint/"}, "UnstructuredPowerPointLoader": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "Microsoft PowerPoint": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/microsoft_powerpoint/"}, "OneNoteLoader": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "Microsoft OneNote": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/microsoft_onenote/"}, "AzureCosmosDBVectorSearch": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "Azure Cosmos DB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azure_cosmos_db/"}, "O365Toolkit": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "Office365": "https://python.langchain.com/v0.2/docs/integrations/toolkits/office365/"}, "PowerBIToolkit": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "PowerBI Dataset": "https://python.langchain.com/v0.2/docs/integrations/toolkits/powerbi/"}, "PowerBIDataset": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "PowerBI Dataset": "https://python.langchain.com/v0.2/docs/integrations/toolkits/powerbi/"}, "GremlinGraph": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "Azure Cosmos DB for Apache Gremlin": "https://python.langchain.com/v0.2/docs/integrations/graphs/azure_cosmosdb_gremlin/"}, "GraphDocument": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "Azure Cosmos DB for Apache Gremlin": "https://python.langchain.com/v0.2/docs/integrations/graphs/azure_cosmosdb_gremlin/"}, "Node": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "Azure Cosmos DB for Apache Gremlin": 
"https://python.langchain.com/v0.2/docs/integrations/graphs/azure_cosmosdb_gremlin/"}, "Relationship": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "Azure Cosmos DB for Apache Gremlin": "https://python.langchain.com/v0.2/docs/integrations/graphs/azure_cosmosdb_gremlin/"}, "BingSearchAPIWrapper": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "Bing Search": "https://python.langchain.com/v0.2/docs/integrations/tools/bing_search/"}, "PresidioAnonymizer": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/"}, "PresidioReversibleAnonymizer": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/"}, "AmazonAPIGateway": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "Amazon API Gateway": "https://python.langchain.com/v0.2/docs/integrations/llms/amazon_api_gateway/"}, "ContentHandlerBase": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/"}, "S3DirectoryLoader": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "AWS S3 Directory": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/aws_s3_directory/"}, "S3FileLoader": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "AWS S3 File": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/aws_s3_file/"}, "AthenaLoader": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "Athena": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/athena/"}, "DynamoDBChatMessageHistory": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "AWS DynamoDB": "https://python.langchain.com/v0.2/docs/integrations/memory/aws_dynamodb/"}, "NeptuneGraph": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "Amazon Neptune with Cypher": "https://python.langchain.com/v0.2/docs/integrations/graphs/amazon_neptune_open_cypher/"}, "NeptuneAnalyticsGraph": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "Amazon Neptune with Cypher": "https://python.langchain.com/v0.2/docs/integrations/graphs/amazon_neptune_open_cypher/"}, "NeptuneOpenCypherQAChain": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "Amazon Neptune with Cypher": "https://python.langchain.com/v0.2/docs/integrations/graphs/amazon_neptune_open_cypher/"}, "NeptuneRdfGraph": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "Amazon Neptune with SPARQL": "https://python.langchain.com/v0.2/docs/integrations/graphs/amazon_neptune_sparql/"}, "NeptuneSparqlQAChain": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "Amazon Neptune with SPARQL": "https://python.langchain.com/v0.2/docs/integrations/graphs/amazon_neptune_sparql/"}, "SageMakerCallbackHandler": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "SageMaker Tracking": "https://python.langchain.com/v0.2/docs/integrations/callbacks/sagemaker_tracking/"}, "AmazonComprehendModerationChain": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/"}, "ChatHuggingFace": {"Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/chat/huggingface/"}, "HuggingFacePipeline": {"Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/platforms/huggingface/", "Hugging Face Local Pipelines": 
"https://python.langchain.com/v0.2/docs/integrations/llms/huggingface_pipelines/", "LM Format Enforcer": "https://python.langchain.com/v0.2/docs/integrations/llms/lmformatenforcer_experimental/", "RELLM": "https://python.langchain.com/v0.2/docs/integrations/llms/rellm_experimental/", "JSONFormer": "https://python.langchain.com/v0.2/docs/integrations/llms/jsonformer_experimental/", "OpenVINO": "https://python.langchain.com/v0.2/docs/integrations/llms/openvino/"}, "HuggingFaceHubEmbeddings": {"Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/platforms/huggingface/"}, "HuggingFaceDatasetLoader": {"Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/platforms/huggingface/", "HuggingFace dataset": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/hugging_face_dataset/"}, "load_huggingface_tool": {"Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/platforms/huggingface/", "HuggingFace Hub Tools": "https://python.langchain.com/v0.2/docs/integrations/tools/huggingface_tools/"}, "ChatGPTLoader": {"OpenAI": "https://python.langchain.com/v0.2/docs/integrations/platforms/openai/", "ChatGPT Data": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/chatgpt_loader/"}, "DallEAPIWrapper": {"OpenAI": "https://python.langchain.com/v0.2/docs/integrations/platforms/openai/", "Dall-E Image Generator": "https://python.langchain.com/v0.2/docs/integrations/tools/dalle_image_generator/"}, "OpenAIModerationChain": {"OpenAI": "https://python.langchain.com/v0.2/docs/integrations/platforms/openai/"}, "GoogleGenerativeAI": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "Google AI": "https://python.langchain.com/v0.2/docs/integrations/llms/google_ai/"}, "VertexAIModelGarden": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "Google Cloud Vertex AI": "https://python.langchain.com/v0.2/docs/integrations/llms/google_vertex_ai_palm/"}, "ChatGoogleGenerativeAI": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "Google AI chat models": "https://python.langchain.com/v0.2/docs/integrations/chat/google_generative_ai/"}, "VectorSearchVectorStore": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "Google Vertex AI Vector Search": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_vertex_ai_vector_search/"}, "ScaNN": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "ScaNN": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/scann/"}, "GoogleDocumentAIWarehouseRetriever": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/"}, "GoogleFinanceQueryRun": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "Google Finance": "https://python.langchain.com/v0.2/docs/integrations/tools/google_finance/"}, "GoogleFinanceAPIWrapper": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "Google Finance": "https://python.langchain.com/v0.2/docs/integrations/tools/google_finance/"}, "GoogleJobsQueryRun": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "Google Jobs": "https://python.langchain.com/v0.2/docs/integrations/tools/google_jobs/"}, "GoogleLensQueryRun": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "Google Lens": 
"https://python.langchain.com/v0.2/docs/integrations/tools/google_lens/"}, "GoogleLensAPIWrapper": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "Google Lens": "https://python.langchain.com/v0.2/docs/integrations/tools/google_lens/"}, "GooglePlacesTool": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "Google Places": "https://python.langchain.com/v0.2/docs/integrations/tools/google_places/"}, "GoogleScholarQueryRun": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "Google Scholar": "https://python.langchain.com/v0.2/docs/integrations/tools/google_scholar/"}, "GoogleScholarAPIWrapper": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "Google Scholar": "https://python.langchain.com/v0.2/docs/integrations/tools/google_scholar/"}, "GoogleTrendsQueryRun": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "Google Trends": "https://python.langchain.com/v0.2/docs/integrations/tools/google_trends/"}, "GoogleTrendsAPIWrapper": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "Google Trends": "https://python.langchain.com/v0.2/docs/integrations/tools/google_trends/"}, "SearchApiAPIWrapper": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "SearchApi": "https://python.langchain.com/v0.2/docs/integrations/providers/searchapi/"}, "SerpAPIWrapper": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "SerpAPI": "https://python.langchain.com/v0.2/docs/integrations/providers/serpapi/", "Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/chat/huggingface/", "MLX": "https://python.langchain.com/v0.2/docs/integrations/chat/mlx/"}, "GoogleSerperAPIWrapper": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "Google Serper": "https://python.langchain.com/v0.2/docs/integrations/tools/google_serper/", "Serper - Google Search API": "https://python.langchain.com/v0.2/docs/integrations/providers/google_serper/"}, "YouTubeSearchTool": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "YouTube": "https://python.langchain.com/v0.2/docs/integrations/tools/youtube/"}, "YoutubeAudioLoader": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "YouTube audio": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/youtube_audio/"}, "OpenAIWhisperParser": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "YouTube audio": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/youtube_audio/"}, "YoutubeLoader": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "YouTube": "https://python.langchain.com/v0.2/docs/integrations/providers/youtube/", "YouTube transcripts": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/youtube_transcript/", "Build a Query Analysis System": "https://python.langchain.com/v0.2/docs/tutorials/query_analysis/"}, "AnthropicLLM": {"Anthropic": "https://python.langchain.com/v0.2/docs/integrations/platforms/anthropic/", "AnthropicLLM": "https://python.langchain.com/v0.2/docs/integrations/llms/anthropic/"}, "AIPluginTool": {"ChatGPT Plugins": "https://python.langchain.com/v0.2/docs/integrations/tools/chatgpt_plugins/"}, "AgentType": {"ChatGPT Plugins": 
"https://python.langchain.com/v0.2/docs/integrations/tools/chatgpt_plugins/", "Connery Action Tool": "https://python.langchain.com/v0.2/docs/integrations/tools/connery/", "Google Jobs": "https://python.langchain.com/v0.2/docs/integrations/tools/google_jobs/", "Google Serper": "https://python.langchain.com/v0.2/docs/integrations/tools/google_serper/", "E2B Data Analysis": "https://python.langchain.com/v0.2/docs/integrations/tools/e2b_data_analysis/", "Human as a tool": "https://python.langchain.com/v0.2/docs/integrations/tools/human_tools/", "Yahoo Finance News": "https://python.langchain.com/v0.2/docs/integrations/tools/yahoo_finance_news/", "Google Finance": "https://python.langchain.com/v0.2/docs/integrations/tools/google_finance/", "AWS Lambda": "https://python.langchain.com/v0.2/docs/integrations/tools/awslambda/", "Google Drive": "https://python.langchain.com/v0.2/docs/integrations/tools/google_drive/", "OpenWeatherMap": "https://python.langchain.com/v0.2/docs/integrations/tools/openweathermap/", "Memorize": "https://python.langchain.com/v0.2/docs/integrations/tools/memorize/", "Search Tools": "https://python.langchain.com/v0.2/docs/integrations/tools/search_tools/", "Eleven Labs Text2Speech": "https://python.langchain.com/v0.2/docs/integrations/tools/eleven_labs_tts/", "Bearly Code Interpreter": "https://python.langchain.com/v0.2/docs/integrations/tools/bearly/", "Zapier Natural Language Actions": "https://python.langchain.com/v0.2/docs/integrations/tools/zapier/", "GraphQL": "https://python.langchain.com/v0.2/docs/integrations/tools/graphql/", "SearchApi": "https://python.langchain.com/v0.2/docs/integrations/providers/searchapi/", "Eden AI": "https://python.langchain.com/v0.2/docs/integrations/tools/edenai_tools/", "Shell (bash)": "https://python.langchain.com/v0.2/docs/integrations/tools/bash/", "Zep Open Source Memory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory/", "Xata": "https://python.langchain.com/v0.2/docs/integrations/memory/xata_chat_message_history/", "Zep Cloud Memory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory_cloud/", "LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/callbacks/llmonitor/", "Comet Tracing": "https://python.langchain.com/v0.2/docs/integrations/callbacks/comet_tracing/", "Argilla": "https://python.langchain.com/v0.2/docs/integrations/callbacks/argilla/", "Aim": "https://python.langchain.com/v0.2/docs/integrations/providers/aim_tracking/", "Weights & Biases": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracking/", "MLflow": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_tracking/", "Serper - Google Search API": "https://python.langchain.com/v0.2/docs/integrations/providers/google_serper/", "Flyte": "https://python.langchain.com/v0.2/docs/integrations/providers/flyte/", "WandB Tracing": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracing/", "ClearML": "https://python.langchain.com/v0.2/docs/integrations/providers/clearml_tracking/", "Connery Toolkit": "https://python.langchain.com/v0.2/docs/integrations/toolkits/connery/", "CSV": "https://python.langchain.com/v0.2/docs/integrations/toolkits/csv/", "Jira": "https://python.langchain.com/v0.2/docs/integrations/toolkits/jira/", "Document Comparison": "https://python.langchain.com/v0.2/docs/integrations/toolkits/document_comparison_toolkit/", "Azure Cognitive Services": "https://python.langchain.com/v0.2/docs/integrations/toolkits/azure_cognitive_services/", "Natural 
Language APIs": "https://python.langchain.com/v0.2/docs/integrations/toolkits/openapi_nla/", "Steam Game Recommendation & Game Details": "https://python.langchain.com/v0.2/docs/integrations/toolkits/steam/", "Airbyte Question Answering": "https://python.langchain.com/v0.2/docs/integrations/toolkits/airbyte_structured_qa/", "Github": "https://python.langchain.com/v0.2/docs/integrations/toolkits/github/", "ClickUp": "https://python.langchain.com/v0.2/docs/integrations/toolkits/clickup/", "AINetwork": "https://python.langchain.com/v0.2/docs/integrations/toolkits/ainetwork/", "PlayWright Browser": "https://python.langchain.com/v0.2/docs/integrations/toolkits/playwright/", "Office365": "https://python.langchain.com/v0.2/docs/integrations/toolkits/office365/", "Pandas Dataframe": "https://python.langchain.com/v0.2/docs/integrations/toolkits/pandas/", "NASA": "https://python.langchain.com/v0.2/docs/integrations/toolkits/nasa/", "Gitlab": "https://python.langchain.com/v0.2/docs/integrations/toolkits/gitlab/", "Amazon API Gateway": "https://python.langchain.com/v0.2/docs/integrations/llms/amazon_api_gateway/"}, "initialize_agent": {"ChatGPT Plugins": "https://python.langchain.com/v0.2/docs/integrations/tools/chatgpt_plugins/", "Connery Action Tool": "https://python.langchain.com/v0.2/docs/integrations/tools/connery/", "Google Jobs": "https://python.langchain.com/v0.2/docs/integrations/tools/google_jobs/", "Google Serper": "https://python.langchain.com/v0.2/docs/integrations/tools/google_serper/", "E2B Data Analysis": "https://python.langchain.com/v0.2/docs/integrations/tools/e2b_data_analysis/", "Human as a tool": "https://python.langchain.com/v0.2/docs/integrations/tools/human_tools/", "Yahoo Finance News": "https://python.langchain.com/v0.2/docs/integrations/tools/yahoo_finance_news/", "Google Finance": "https://python.langchain.com/v0.2/docs/integrations/tools/google_finance/", "AWS Lambda": "https://python.langchain.com/v0.2/docs/integrations/tools/awslambda/", "Google Drive": "https://python.langchain.com/v0.2/docs/integrations/tools/google_drive/", "OpenWeatherMap": "https://python.langchain.com/v0.2/docs/integrations/tools/openweathermap/", "Memorize": "https://python.langchain.com/v0.2/docs/integrations/tools/memorize/", "Search Tools": "https://python.langchain.com/v0.2/docs/integrations/tools/search_tools/", "Eleven Labs Text2Speech": "https://python.langchain.com/v0.2/docs/integrations/tools/eleven_labs_tts/", "Bearly Code Interpreter": "https://python.langchain.com/v0.2/docs/integrations/tools/bearly/", "Zapier Natural Language Actions": "https://python.langchain.com/v0.2/docs/integrations/tools/zapier/", "GraphQL": "https://python.langchain.com/v0.2/docs/integrations/tools/graphql/", "SearchApi": "https://python.langchain.com/v0.2/docs/integrations/providers/searchapi/", "Gradio": "https://python.langchain.com/v0.2/docs/integrations/tools/gradio_tools/", "SceneXplain": "https://python.langchain.com/v0.2/docs/integrations/tools/sceneXplain/", "Eden AI": "https://python.langchain.com/v0.2/docs/integrations/tools/edenai_tools/", "Dall-E Image Generator": "https://python.langchain.com/v0.2/docs/integrations/tools/dalle_image_generator/", "Shell (bash)": "https://python.langchain.com/v0.2/docs/integrations/tools/bash/", "Zep Open Source Memory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory/", "Xata": "https://python.langchain.com/v0.2/docs/integrations/memory/xata_chat_message_history/", "Zep Cloud Memory": 
"https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory_cloud/", "LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/callbacks/llmonitor/", "SageMaker Tracking": "https://python.langchain.com/v0.2/docs/integrations/callbacks/sagemaker_tracking/", "Comet Tracing": "https://python.langchain.com/v0.2/docs/integrations/callbacks/comet_tracing/", "Argilla": "https://python.langchain.com/v0.2/docs/integrations/callbacks/argilla/", "Comet": "https://python.langchain.com/v0.2/docs/integrations/providers/comet_tracking/", "Aim": "https://python.langchain.com/v0.2/docs/integrations/providers/aim_tracking/", "Weights & Biases": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracking/", "MLflow": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_tracking/", "Serper - Google Search API": "https://python.langchain.com/v0.2/docs/integrations/providers/google_serper/", "Flyte": "https://python.langchain.com/v0.2/docs/integrations/providers/flyte/", "WandB Tracing": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracing/", "ClearML": "https://python.langchain.com/v0.2/docs/integrations/providers/clearml_tracking/", "Connery Toolkit": "https://python.langchain.com/v0.2/docs/integrations/toolkits/connery/", "Jira": "https://python.langchain.com/v0.2/docs/integrations/toolkits/jira/", "Document Comparison": "https://python.langchain.com/v0.2/docs/integrations/toolkits/document_comparison_toolkit/", "Azure Cognitive Services": "https://python.langchain.com/v0.2/docs/integrations/toolkits/azure_cognitive_services/", "Natural Language APIs": "https://python.langchain.com/v0.2/docs/integrations/toolkits/openapi_nla/", "Steam Game Recommendation & Game Details": "https://python.langchain.com/v0.2/docs/integrations/toolkits/steam/", "Github": "https://python.langchain.com/v0.2/docs/integrations/toolkits/github/", "ClickUp": "https://python.langchain.com/v0.2/docs/integrations/toolkits/clickup/", "AINetwork": "https://python.langchain.com/v0.2/docs/integrations/toolkits/ainetwork/", "PlayWright Browser": "https://python.langchain.com/v0.2/docs/integrations/toolkits/playwright/", "Office365": "https://python.langchain.com/v0.2/docs/integrations/toolkits/office365/", "NASA": "https://python.langchain.com/v0.2/docs/integrations/toolkits/nasa/", "Gitlab": "https://python.langchain.com/v0.2/docs/integrations/toolkits/gitlab/", "Amazon API Gateway": "https://python.langchain.com/v0.2/docs/integrations/llms/amazon_api_gateway/"}, "DataForSeoAPIWrapper": {"DataForSEO": "https://python.langchain.com/v0.2/docs/integrations/providers/dataforseo/"}, "Tool": {"DataForSEO": "https://python.langchain.com/v0.2/docs/integrations/tools/dataforseo/", "Python REPL": "https://python.langchain.com/v0.2/docs/integrations/tools/python/", "Google Serper": "https://python.langchain.com/v0.2/docs/integrations/tools/google_serper/", "SerpAPI": "https://python.langchain.com/v0.2/docs/integrations/tools/serpapi/", "SearchApi": "https://python.langchain.com/v0.2/docs/integrations/providers/searchapi/", "Google Search": "https://python.langchain.com/v0.2/docs/integrations/tools/google_search/", "Reddit Search ": "https://python.langchain.com/v0.2/docs/integrations/tools/reddit_search/", "Ionic Shopping Tool": "https://python.langchain.com/v0.2/docs/integrations/tools/ionic_shopping/", "Zep Open Source Memory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory/", "Zep Cloud Memory": 
"https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory_cloud/", "Serper - Google Search API": "https://python.langchain.com/v0.2/docs/integrations/providers/google_serper/", "Document Comparison": "https://python.langchain.com/v0.2/docs/integrations/toolkits/document_comparison_toolkit/", "Github": "https://python.langchain.com/v0.2/docs/integrations/toolkits/github/", "Bittensor": "https://python.langchain.com/v0.2/docs/integrations/llms/bittensor/"}, "ConneryService": {"Connery Action Tool": "https://python.langchain.com/v0.2/docs/integrations/tools/connery/", "Connery Toolkit": "https://python.langchain.com/v0.2/docs/integrations/toolkits/connery/"}, "DataheraldAPIWrapper": {"Dataherald": "https://python.langchain.com/v0.2/docs/integrations/providers/dataherald/"}, "SearxSearchWrapper": {"SearxNG Search": "https://python.langchain.com/v0.2/docs/integrations/tools/searx_search/", "SearxNG Search API": "https://python.langchain.com/v0.2/docs/integrations/providers/searx/"}, "PythonREPL": {"Python REPL": "https://python.langchain.com/v0.2/docs/integrations/tools/python/"}, "GoogleJobsAPIWrapper": {"Google Jobs": "https://python.langchain.com/v0.2/docs/integrations/tools/google_jobs/"}, "create_openai_functions_agent": {"Tavily Search": "https://python.langchain.com/v0.2/docs/integrations/tools/tavily_search/", "Infobip": "https://python.langchain.com/v0.2/docs/integrations/tools/infobip/", "AskNews": "https://python.langchain.com/v0.2/docs/integrations/tools/asknews/", "Semantic Scholar API Tool": "https://python.langchain.com/v0.2/docs/integrations/tools/semanticscholar/", "Passio NutritionAI": "https://python.langchain.com/v0.2/docs/integrations/tools/passio_nutrition_ai/", "You.com Search": "https://python.langchain.com/v0.2/docs/integrations/tools/you/", "Python": "https://python.langchain.com/v0.2/docs/integrations/toolkits/python/", "Gmail": "https://python.langchain.com/v0.2/docs/integrations/toolkits/gmail/", "MultiOn": "https://python.langchain.com/v0.2/docs/integrations/toolkits/multion/", "Polygon IO Toolkit": "https://python.langchain.com/v0.2/docs/integrations/toolkits/polygon/"}, "InfobipAPIWrapper": {"Infobip": "https://python.langchain.com/v0.2/docs/integrations/tools/infobip/"}, "AskNewsSearch": {"AskNews": "https://python.langchain.com/v0.2/docs/integrations/tools/asknews/"}, "E2BDataAnalysisTool": {"E2B Data Analysis": "https://python.langchain.com/v0.2/docs/integrations/tools/e2b_data_analysis/"}, "HumanInputRun": {"Human as a tool": "https://python.langchain.com/v0.2/docs/integrations/tools/human_tools/"}, "NucliaUnderstandingAPI": {"Nuclia Understanding": "https://python.langchain.com/v0.2/docs/integrations/tools/nuclia/", "Nuclia": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/nuclia/"}, "YahooFinanceNewsTool": {"Yahoo Finance News": "https://python.langchain.com/v0.2/docs/integrations/tools/yahoo_finance_news/"}, "WikidataAPIWrapper": {"Wikidata": "https://python.langchain.com/v0.2/docs/integrations/tools/wikidata/"}, "WikidataQueryRun": {"Wikidata": "https://python.langchain.com/v0.2/docs/integrations/tools/wikidata/"}, "TwilioAPIWrapper": {"Twilio": "https://python.langchain.com/v0.2/docs/integrations/tools/twilio/"}, "IFTTTWebhook": {"IFTTT WebHooks": "https://python.langchain.com/v0.2/docs/integrations/tools/ifttt/"}, "SemanticScholarQueryRun": {"Semantic Scholar API Tool": "https://python.langchain.com/v0.2/docs/integrations/tools/semanticscholar/"}, "AlphaVantageAPIWrapper": {"Alpha Vantage": 
"https://python.langchain.com/v0.2/docs/integrations/tools/alpha_vantage/"}, "GoogleCloudTextToSpeechTool": {"Google Cloud Text-to-Speech": "https://python.langchain.com/v0.2/docs/integrations/tools/google_cloud_texttospeech/"}, "OracleSummary": {"Oracle AI Vector Search: Generate Summary": "https://python.langchain.com/v0.2/docs/integrations/tools/oracleai/", "OracleAI Vector Search": "https://python.langchain.com/v0.2/docs/integrations/providers/oracleai/"}, "StackExchangeAPIWrapper": {"StackExchange": "https://python.langchain.com/v0.2/docs/integrations/tools/stackexchange/", "Stack Exchange": "https://python.langchain.com/v0.2/docs/integrations/providers/stackexchange/"}, "TextRequestsWrapper": {"Requests": "https://python.langchain.com/v0.2/docs/integrations/tools/requests/"}, "JsonRequestsWrapper": {"Requests": "https://python.langchain.com/v0.2/docs/integrations/tools/requests/"}, "OpenWeatherMapAPIWrapper": {"OpenWeatherMap": "https://python.langchain.com/v0.2/docs/integrations/providers/openweathermap/"}, "get_from_env": {"Passio NutritionAI": "https://python.langchain.com/v0.2/docs/integrations/tools/passio_nutrition_ai/"}, "NutritionAI": {"Passio NutritionAI": "https://python.langchain.com/v0.2/docs/integrations/tools/passio_nutrition_ai/"}, "NutritionAIAPI": {"Passio NutritionAI": "https://python.langchain.com/v0.2/docs/integrations/tools/passio_nutrition_ai/"}, "PubmedQueryRun": {"PubMed": "https://python.langchain.com/v0.2/docs/integrations/tools/pubmed/"}, "ConversationBufferMemory": {"Memorize": "https://python.langchain.com/v0.2/docs/integrations/tools/memorize/", "Gradio": "https://python.langchain.com/v0.2/docs/integrations/tools/gradio_tools/", "SceneXplain": "https://python.langchain.com/v0.2/docs/integrations/tools/sceneXplain/", "Reddit Search ": "https://python.langchain.com/v0.2/docs/integrations/tools/reddit_search/", "Xata": "https://python.langchain.com/v0.2/docs/integrations/memory/xata_chat_message_history/", "Llama2Chat": "https://python.langchain.com/v0.2/docs/integrations/chat/llama2_chat/", "NVIDIA NIMs": "https://python.langchain.com/v0.2/docs/integrations/chat/nvidia_ai_endpoints/", "Chat Over Documents with Vectara": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_chat/", "SAP HANA Cloud Vector Engine": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sap_hanavector/", "Bittensor": "https://python.langchain.com/v0.2/docs/integrations/llms/bittensor/", "Bedrock": "https://python.langchain.com/v0.2/docs/integrations/llms/bedrock/"}, "GradientLLM": {"Memorize": "https://python.langchain.com/v0.2/docs/integrations/tools/memorize/", "Gradient": "https://python.langchain.com/v0.2/docs/integrations/llms/gradient/"}, "ElevenLabsText2SpeechTool": {"Eleven Labs Text2Speech": "https://python.langchain.com/v0.2/docs/integrations/tools/eleven_labs_tts/", "ElevenLabs": "https://python.langchain.com/v0.2/docs/integrations/providers/elevenlabs/"}, "BearlyInterpreterTool": {"Bearly Code Interpreter": "https://python.langchain.com/v0.2/docs/integrations/tools/bearly/"}, "VectorstoreIndexCreator": {"Apify": "https://python.langchain.com/v0.2/docs/integrations/tools/apify/", "HuggingFace dataset": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/hugging_face_dataset/", "Spreedly": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/spreedly/", "Image captions": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/image_captions/", "Figma": 
"https://python.langchain.com/v0.2/docs/integrations/document_loaders/figma/", "Apify Dataset": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/apify_dataset/", "Iugu": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/iugu/", "Stripe": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/stripe/", "Modern Treasury": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/modern_treasury/"}, "ApifyWrapper": {"Apify": "https://python.langchain.com/v0.2/docs/integrations/providers/apify/"}, "ZapierToolkit": {"Zapier Natural Language Actions": "https://python.langchain.com/v0.2/docs/integrations/tools/zapier/"}, "ZapierNLAWrapper": {"Zapier Natural Language Actions": "https://python.langchain.com/v0.2/docs/integrations/tools/zapier/"}, "SimpleSequentialChain": {"Zapier Natural Language Actions": "https://python.langchain.com/v0.2/docs/integrations/tools/zapier/", "SageMaker Tracking": "https://python.langchain.com/v0.2/docs/integrations/callbacks/sagemaker_tracking/", "Rebuff": "https://python.langchain.com/v0.2/docs/integrations/providers/rebuff/", "Predibase": "https://python.langchain.com/v0.2/docs/integrations/llms/predibase/", "Eden AI": "https://python.langchain.com/v0.2/docs/integrations/llms/edenai/", "Replicate": "https://python.langchain.com/v0.2/docs/integrations/llms/replicate/"}, "TransformChain": {"Zapier Natural Language Actions": "https://python.langchain.com/v0.2/docs/integrations/tools/zapier/", "Rebuff": "https://python.langchain.com/v0.2/docs/integrations/providers/rebuff/"}, "ZapierNLARunAction": {"Zapier Natural Language Actions": "https://python.langchain.com/v0.2/docs/integrations/tools/zapier/"}, "RivaASR": {"NVIDIA Riva: ASR and TTS": "https://python.langchain.com/v0.2/docs/integrations/tools/nvidia_riva/"}, "RivaTTS": {"NVIDIA Riva: ASR and TTS": "https://python.langchain.com/v0.2/docs/integrations/tools/nvidia_riva/"}, "RivaAudioEncoding": {"NVIDIA Riva: ASR and TTS": "https://python.langchain.com/v0.2/docs/integrations/tools/nvidia_riva/"}, "AudioStream": {"NVIDIA Riva: ASR and TTS": "https://python.langchain.com/v0.2/docs/integrations/tools/nvidia_riva/"}, "GoldenQueryAPIWrapper": {"Golden Query": "https://python.langchain.com/v0.2/docs/integrations/tools/golden_query/", "Golden": "https://python.langchain.com/v0.2/docs/integrations/providers/golden/"}, "create_react_agent": {"ArXiv": "https://python.langchain.com/v0.2/docs/integrations/tools/arxiv/", "Ionic Shopping Tool": "https://python.langchain.com/v0.2/docs/integrations/tools/ionic_shopping/", "Streamlit": "https://python.langchain.com/v0.2/docs/integrations/callbacks/streamlit/", "Dataherald": "https://python.langchain.com/v0.2/docs/integrations/providers/dataherald/", "Python": "https://python.langchain.com/v0.2/docs/integrations/toolkits/python/", "Amadeus": "https://python.langchain.com/v0.2/docs/integrations/toolkits/amadeus/", "Bittensor": "https://python.langchain.com/v0.2/docs/integrations/llms/bittensor/"}, "ArxivAPIWrapper": {"ArXiv": "https://python.langchain.com/v0.2/docs/integrations/tools/arxiv/"}, "DuckDuckGoSearchRun": {"DuckDuckGo Search": "https://python.langchain.com/v0.2/docs/integrations/tools/ddg/", "Github": "https://python.langchain.com/v0.2/docs/integrations/toolkits/github/"}, "DuckDuckGoSearchResults": {"DuckDuckGo Search": "https://python.langchain.com/v0.2/docs/integrations/tools/ddg/"}, "DuckDuckGoSearchAPIWrapper": {"DuckDuckGo Search": "https://python.langchain.com/v0.2/docs/integrations/tools/ddg/"}, 
"SceneXplainTool": {"SceneXplain": "https://python.langchain.com/v0.2/docs/integrations/tools/sceneXplain/"}, "WolframAlphaAPIWrapper": {"Wolfram Alpha": "https://python.langchain.com/v0.2/docs/integrations/providers/wolfram_alpha/"}, "OpenAIFunctionsAgent": {"Exa Search": "https://python.langchain.com/v0.2/docs/integrations/tools/exa_search/", "LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/callbacks/llmonitor/", "Robocorp": "https://python.langchain.com/v0.2/docs/integrations/toolkits/robocorp/"}, "EdenAiExplicitImageTool": {"Eden AI": "https://python.langchain.com/v0.2/docs/integrations/providers/edenai/"}, "EdenAiObjectDetectionTool": {"Eden AI": "https://python.langchain.com/v0.2/docs/integrations/providers/edenai/"}, "EdenAiParsingIDTool": {"Eden AI": "https://python.langchain.com/v0.2/docs/integrations/providers/edenai/"}, "EdenAiParsingInvoiceTool": {"Eden AI": "https://python.langchain.com/v0.2/docs/integrations/providers/edenai/"}, "EdenAiSpeechToTextTool": {"Eden AI": "https://python.langchain.com/v0.2/docs/integrations/providers/edenai/"}, "EdenAiTextModerationTool": {"Eden AI": "https://python.langchain.com/v0.2/docs/integrations/providers/edenai/"}, "EdenAiTextToSpeechTool": {"Eden AI": "https://python.langchain.com/v0.2/docs/integrations/providers/edenai/"}, "EdenAI": {"Eden AI": "https://python.langchain.com/v0.2/docs/integrations/llms/edenai/"}, "MojeekSearch": {"Mojeek Search": "https://python.langchain.com/v0.2/docs/integrations/tools/mojeek_search/"}, "GoogleSearchAPIWrapper": {"Google Search": "https://python.langchain.com/v0.2/docs/integrations/tools/google_search/", "Bittensor": "https://python.langchain.com/v0.2/docs/integrations/llms/bittensor/"}, "RedditSearchRun": {"Reddit Search ": "https://python.langchain.com/v0.2/docs/integrations/tools/reddit_search/"}, "RedditSearchAPIWrapper": {"Reddit Search ": "https://python.langchain.com/v0.2/docs/integrations/tools/reddit_search/"}, "RedditSearchSchema": {"Reddit Search ": "https://python.langchain.com/v0.2/docs/integrations/tools/reddit_search/"}, "StructuredChatAgent": {"Reddit Search ": "https://python.langchain.com/v0.2/docs/integrations/tools/reddit_search/"}, "ReadOnlySharedMemory": {"Reddit Search ": "https://python.langchain.com/v0.2/docs/integrations/tools/reddit_search/"}, "YouSearchTool": {"You.com Search": "https://python.langchain.com/v0.2/docs/integrations/tools/you/"}, "VertexAIImageGeneratorChat": {"Google Imagen": "https://python.langchain.com/v0.2/docs/integrations/tools/google_imagen/"}, "VertexAIImageEditorChat": {"Google Imagen": "https://python.langchain.com/v0.2/docs/integrations/tools/google_imagen/"}, "VertexAIImageCaptioning": {"Google Imagen": "https://python.langchain.com/v0.2/docs/integrations/tools/google_imagen/"}, "VertexAIVisualQnAChat": {"Google Imagen": "https://python.langchain.com/v0.2/docs/integrations/tools/google_imagen/"}, "ShellTool": {"Shell (bash)": "https://python.langchain.com/v0.2/docs/integrations/tools/bash/"}, "PolygonAggregates": {"Polygon Stock Market API Tools": "https://python.langchain.com/v0.2/docs/integrations/tools/polygon/"}, "PolygonFinancials": {"Polygon Stock Market API Tools": "https://python.langchain.com/v0.2/docs/integrations/tools/polygon/"}, "PolygonLastQuote": {"Polygon Stock Market API Tools": "https://python.langchain.com/v0.2/docs/integrations/tools/polygon/"}, "PolygonTickerNews": {"Polygon Stock Market API Tools": "https://python.langchain.com/v0.2/docs/integrations/tools/polygon/"}, "PolygonAPIWrapper": {"Polygon Stock Market 
API Tools": "https://python.langchain.com/v0.2/docs/integrations/tools/polygon/", "Polygon IO Toolkit": "https://python.langchain.com/v0.2/docs/integrations/toolkits/polygon/"}, "PolygonAggregatesSchema": {"Polygon Stock Market API Tools": "https://python.langchain.com/v0.2/docs/integrations/tools/polygon/"}, "FileManagementToolkit": {"File System": "https://python.langchain.com/v0.2/docs/integrations/tools/filesystem/"}, "BraveSearch": {"Brave Search": "https://python.langchain.com/v0.2/docs/integrations/providers/brave_search/"}, "RedisChatMessageHistory": {"Redis": "https://python.langchain.com/v0.2/docs/integrations/memory/redis_chat_message_history/"}, "ElasticsearchChatMessageHistory": {"Elasticsearch": "https://python.langchain.com/v0.2/docs/integrations/memory/elasticsearch_chat_message_history/"}, "UpstashRedisChatMessageHistory": {"Upstash Redis": "https://python.langchain.com/v0.2/docs/integrations/memory/upstash_redis_chat_message_history/", "Upstash Vector": "https://python.langchain.com/v0.2/docs/integrations/providers/upstash/"}, "ZepCloudChatMessageHistory": {"ZepCloudChatMessageHistory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_cloud_chat_message_history/", "Zep": "https://python.langchain.com/v0.2/docs/integrations/providers/zep/"}, "SingleStoreDBChatMessageHistory": {"SingleStoreDB": "https://python.langchain.com/v0.2/docs/integrations/providers/singlestoredb/"}, "PostgresChatMessageHistory": {"Postgres": "https://python.langchain.com/v0.2/docs/integrations/memory/postgres_chat_message_history/"}, "MomentoChatMessageHistory": {"Momento Cache": "https://python.langchain.com/v0.2/docs/integrations/memory/momento_chat_message_history/", "Momento": "https://python.langchain.com/v0.2/docs/integrations/providers/momento/"}, "XataChatMessageHistory": {"Xata": "https://python.langchain.com/v0.2/docs/integrations/providers/xata/"}, "XataVectorStore": {"Xata": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/xata/"}, "CassandraChatMessageHistory": {"Cassandra ": "https://python.langchain.com/v0.2/docs/integrations/memory/cassandra_chat_message_history/", "Cassandra": "https://python.langchain.com/v0.2/docs/integrations/providers/cassandra/"}, "MotorheadMemory": {"Mot\u00f6rhead": "https://python.langchain.com/v0.2/docs/integrations/providers/motorhead/"}, "AstraDBChatMessageHistory": {"Astra DB ": "https://python.langchain.com/v0.2/docs/integrations/memory/astradb_chat_message_history/"}, "StreamlitChatMessageHistory": {"Streamlit": "https://python.langchain.com/v0.2/docs/integrations/providers/streamlit/"}, "Neo4jChatMessageHistory": {"Neo4j": "https://python.langchain.com/v0.2/docs/integrations/memory/neo4j_chat_message_history/"}, "TiDBChatMessageHistory": {"TiDB": "https://python.langchain.com/v0.2/docs/integrations/providers/tidb/"}, "RocksetChatMessageHistory": {"Rockset": "https://python.langchain.com/v0.2/docs/integrations/providers/rockset/"}, "SolarChat": {"solar.md": "https://python.langchain.com/v0.2/docs/integrations/chat/solar/"}, "HuggingFaceEndpoint": {"Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/chat/huggingface/", "Huggingface Endpoints": "https://python.langchain.com/v0.2/docs/integrations/llms/huggingface_endpoint/"}, "format_log_to_str": {"Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/chat/huggingface/", "MLX": "https://python.langchain.com/v0.2/docs/integrations/chat/mlx/"}, "ReActJsonSingleInputOutputParser": {"Hugging Face": 
"https://python.langchain.com/v0.2/docs/integrations/chat/huggingface/", "MLX": "https://python.langchain.com/v0.2/docs/integrations/chat/mlx/", "Amadeus": "https://python.langchain.com/v0.2/docs/integrations/toolkits/amadeus/"}, "AzureMLChatOnlineEndpoint": {"AzureMLChatOnlineEndpoint": "https://python.langchain.com/v0.2/docs/integrations/chat/azureml_chat_endpoint/"}, "AzureMLEndpointApiType": {"AzureMLChatOnlineEndpoint": "https://python.langchain.com/v0.2/docs/integrations/chat/azureml_chat_endpoint/", "Azure ML": "https://python.langchain.com/v0.2/docs/integrations/llms/azure_ml/"}, "CustomOpenAIChatContentFormatter": {"AzureMLChatOnlineEndpoint": "https://python.langchain.com/v0.2/docs/integrations/chat/azureml_chat_endpoint/"}, "ChatKinetica": {"Kinetica SqlAssist LLM Demo": "https://python.langchain.com/v0.2/docs/integrations/chat/kinetica/", "Kinetica": "https://python.langchain.com/v0.2/docs/integrations/providers/kinetica/"}, "KineticaSqlOutputParser": {"Kinetica SqlAssist LLM Demo": "https://python.langchain.com/v0.2/docs/integrations/chat/kinetica/"}, "KineticaSqlResponse": {"Kinetica SqlAssist LLM Demo": "https://python.langchain.com/v0.2/docs/integrations/chat/kinetica/"}, "PaiEasChatEndpoint": {"Alibaba Cloud PAI EAS": "https://python.langchain.com/v0.2/docs/integrations/chat/alibaba_cloud_pai_eas/", "Alibaba Cloud": "https://python.langchain.com/v0.2/docs/integrations/providers/alibaba_cloud/"}, "ChatCoze": {"Chat with Coze Bot": "https://python.langchain.com/v0.2/docs/integrations/chat/coze/"}, "ChatOctoAI": {"ChatOctoAI": "https://python.langchain.com/v0.2/docs/integrations/chat/octoai/"}, "ChatDeepInfra": {"DeepInfra": "https://python.langchain.com/v0.2/docs/integrations/providers/deepinfra/"}, "ToolsOutputParser": {"ChatAnthropic": "https://python.langchain.com/v0.2/docs/integrations/chat/anthropic/"}, "ChatLiteLLM": {"ChatLiteLLM": "https://python.langchain.com/v0.2/docs/integrations/chat/litellm/"}, "LlamaEdgeChatService": {"LlamaEdge": "https://python.langchain.com/v0.2/docs/integrations/chat/llama_edge/"}, "HarmBlockThreshold": {"Google AI chat models": "https://python.langchain.com/v0.2/docs/integrations/chat/google_generative_ai/", "ChatVertexAI": "https://python.langchain.com/v0.2/docs/integrations/chat/google_vertex_ai_palm/", "Google Cloud Vertex AI": "https://python.langchain.com/v0.2/docs/integrations/llms/google_vertex_ai_palm/", "Google AI": "https://python.langchain.com/v0.2/docs/integrations/llms/google_ai/"}, "HarmCategory": {"Google AI chat models": "https://python.langchain.com/v0.2/docs/integrations/chat/google_generative_ai/", "ChatVertexAI": "https://python.langchain.com/v0.2/docs/integrations/chat/google_vertex_ai_palm/", "Google Cloud Vertex AI": "https://python.langchain.com/v0.2/docs/integrations/llms/google_vertex_ai_palm/", "Google AI": "https://python.langchain.com/v0.2/docs/integrations/llms/google_ai/"}, "OllamaFunctions": {"OllamaFunctions": "https://python.langchain.com/v0.2/docs/integrations/chat/ollama_functions/", "Ollama": "https://python.langchain.com/v0.2/docs/integrations/providers/ollama/"}, "VolcEngineMaasChat": {"VolcEngineMaasChat": "https://python.langchain.com/v0.2/docs/integrations/chat/volcengine_maas/"}, "ChatLlamaAPI": {"ChatLlamaAPI": "https://python.langchain.com/v0.2/docs/integrations/chat/llama_api/"}, "create_tagging_chain": {"ChatLlamaAPI": "https://python.langchain.com/v0.2/docs/integrations/chat/llama_api/"}, "ChatKonko": {"ChatKonko": "https://python.langchain.com/v0.2/docs/integrations/chat/konko/"}, 
"create_structured_runnable": {"ChatVertexAI": "https://python.langchain.com/v0.2/docs/integrations/chat/google_vertex_ai_palm/"}, "MLXPipeline": {"MLX": "https://python.langchain.com/v0.2/docs/integrations/chat/mlx/", "MLX Local Pipelines": "https://python.langchain.com/v0.2/docs/integrations/llms/mlx_pipelines/"}, "ChatMLX": {"MLX": "https://python.langchain.com/v0.2/docs/integrations/chat/mlx/"}, "GigaChat": {"GigaChat": "https://python.langchain.com/v0.2/docs/integrations/llms/gigachat/", "Salute Devices": "https://python.langchain.com/v0.2/docs/integrations/providers/salute_devices/"}, "JinaChat": {"JinaChat": "https://python.langchain.com/v0.2/docs/integrations/chat/jinachat/"}, "SystemMessagePromptTemplate": {"JinaChat": "https://python.langchain.com/v0.2/docs/integrations/chat/jinachat/", "vLLM Chat": "https://python.langchain.com/v0.2/docs/integrations/chat/vllm/", "Yellowbrick": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/yellowbrick/", "Figma": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/figma/"}, "ChatOllama": {"ChatOllama": "https://python.langchain.com/v0.2/docs/integrations/chat/ollama/", "Ollama": "https://python.langchain.com/v0.2/docs/integrations/providers/ollama/"}, "ChatEverlyAI": {"ChatEverlyAI": "https://python.langchain.com/v0.2/docs/integrations/chat/everlyai/"}, "GPTRouter": {"GPTRouter": "https://python.langchain.com/v0.2/docs/integrations/chat/gpt_router/"}, "GPTRouterModel": {"GPTRouter": "https://python.langchain.com/v0.2/docs/integrations/chat/gpt_router/"}, "ChatLiteLLMRouter": {"ChatLiteLLMRouter": "https://python.langchain.com/v0.2/docs/integrations/chat/litellm_router/"}, "ChatFriendli": {"ChatFriendli": "https://python.langchain.com/v0.2/docs/integrations/chat/friendli/"}, "ChatZhipuAI": {"ZHIPU AI": "https://python.langchain.com/v0.2/docs/integrations/chat/zhipuai/"}, "create_json_chat_agent": {"ZHIPU AI": "https://python.langchain.com/v0.2/docs/integrations/chat/zhipuai/"}, "ChatBaichuan": {"Chat with Baichuan-192K": "https://python.langchain.com/v0.2/docs/integrations/chat/baichuan/", "Baichuan": "https://python.langchain.com/v0.2/docs/integrations/providers/baichuan/"}, "ChatTogether": {"Together AI": "https://python.langchain.com/v0.2/docs/integrations/llms/together/"}, "Together": {"Together AI": "https://python.langchain.com/v0.2/docs/integrations/llms/together/"}, "Llama2Chat": {"Llama2Chat": "https://python.langchain.com/v0.2/docs/integrations/chat/llama2_chat/"}, "HuggingFaceTextGenInference": {"Llama2Chat": "https://python.langchain.com/v0.2/docs/integrations/chat/llama2_chat/"}, "QianfanChatEndpoint": {"QianfanChatEndpoint": "https://python.langchain.com/v0.2/docs/integrations/chat/baidu_qianfan_endpoint/", "ErnieBotChat": "https://python.langchain.com/v0.2/docs/integrations/chat/ernie/", "Baidu": "https://python.langchain.com/v0.2/docs/integrations/providers/baidu/"}, "ChatEdenAI": {"Eden AI": "https://python.langchain.com/v0.2/docs/integrations/providers/edenai/"}, "ErnieBotChat": {"ErnieBotChat": "https://python.langchain.com/v0.2/docs/integrations/chat/ernie/"}, "ChatHunyuan": {"Tencent Hunyuan": "https://python.langchain.com/v0.2/docs/integrations/chat/tencent_hunyuan/", "Tencent": "https://python.langchain.com/v0.2/docs/integrations/providers/tencent/"}, "MiniMaxChat": {"MiniMaxChat": "https://python.langchain.com/v0.2/docs/integrations/chat/minimax/", "Minimax": "https://python.langchain.com/v0.2/docs/integrations/providers/minimax/"}, "ChatYuan2": {"Yuan2.0": 
"https://python.langchain.com/v0.2/docs/integrations/chat/yuan2/"}, "ChatTongyi": {"ChatTongyi": "https://python.langchain.com/v0.2/docs/integrations/chat/tongyi/", "Alibaba Cloud": "https://python.langchain.com/v0.2/docs/integrations/providers/alibaba_cloud/"}, "PromptLayerChatOpenAI": {"PromptLayerChatOpenAI": "https://python.langchain.com/v0.2/docs/integrations/chat/promptlayer_chatopenai/", "PromptLayer": "https://python.langchain.com/v0.2/docs/integrations/providers/promptlayer/"}, "ChatSparkLLM": {"SparkLLM Chat": "https://python.langchain.com/v0.2/docs/integrations/chat/sparkllm/"}, "MoonshotChat": {"MoonshotChat": "https://python.langchain.com/v0.2/docs/integrations/chat/moonshot/"}, "ChatDappierAI": {"Dappier AI": "https://python.langchain.com/v0.2/docs/integrations/chat/dappier/"}, "ChatMaritalk": {"Maritalk": "https://python.langchain.com/v0.2/docs/integrations/chat/maritalk/"}, "load_qa_chain": {"Maritalk": "https://python.langchain.com/v0.2/docs/integrations/chat/maritalk/", "Chat Over Documents with Vectara": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_chat/", "Amazon Textract ": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/amazon_textract/", "SageMakerEndpoint": "https://python.langchain.com/v0.2/docs/integrations/llms/sagemaker/"}, "ChatPremAI": {"ChatPremAI": "https://python.langchain.com/v0.2/docs/integrations/chat/premai/", "PremAI": "https://python.langchain.com/v0.2/docs/integrations/providers/premai/"}, "ChatAnyscale": {"ChatAnyscale": "https://python.langchain.com/v0.2/docs/integrations/chat/anyscale/", "Anyscale": "https://python.langchain.com/v0.2/docs/integrations/providers/anyscale/"}, "ChatYandexGPT": {"ChatYandexGPT": "https://python.langchain.com/v0.2/docs/integrations/chat/yandex/", "Yandex": "https://python.langchain.com/v0.2/docs/integrations/providers/yandex/"}, "ChatPerplexity": {"ChatPerplexity": "https://python.langchain.com/v0.2/docs/integrations/chat/perplexity/"}, "ChatAnthropicTools": {"[Deprecated] Experimental Anthropic Tools Wrapper": "https://python.langchain.com/v0.2/docs/integrations/chat/anthropic_functions/"}, "ConversationChain": {"NVIDIA NIMs": "https://python.langchain.com/v0.2/docs/integrations/chat/nvidia_ai_endpoints/", "Bedrock": "https://python.langchain.com/v0.2/docs/integrations/llms/bedrock/"}, "DeepEvalCallbackHandler": {"Confident": "https://python.langchain.com/v0.2/docs/integrations/callbacks/confident/", "Confident AI": "https://python.langchain.com/v0.2/docs/integrations/providers/confident/"}, "LLMonitorCallbackHandler": {"LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/providers/llmonitor/"}, "identify": {"LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/callbacks/llmonitor/"}, "ContextCallbackHandler": {"Context": "https://python.langchain.com/v0.2/docs/integrations/providers/context/"}, "FiddlerCallbackHandler": {"Fiddler": "https://python.langchain.com/v0.2/docs/integrations/providers/fiddler/"}, "LabelStudioCallbackHandler": {"Label Studio": "https://python.langchain.com/v0.2/docs/integrations/providers/labelstudio/"}, "CometTracer": {"Comet Tracing": "https://python.langchain.com/v0.2/docs/integrations/callbacks/comet_tracing/", "Comet": "https://python.langchain.com/v0.2/docs/integrations/providers/comet_tracking/"}, "ArgillaCallbackHandler": {"Argilla": "https://python.langchain.com/v0.2/docs/integrations/providers/argilla/"}, "StdOutCallbackHandler": {"Argilla": 
"https://python.langchain.com/v0.2/docs/integrations/callbacks/argilla/", "Comet": "https://python.langchain.com/v0.2/docs/integrations/providers/comet_tracking/", "Aim": "https://python.langchain.com/v0.2/docs/integrations/providers/aim_tracking/", "Weights & Biases": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracking/", "ClearML": "https://python.langchain.com/v0.2/docs/integrations/providers/clearml_tracking/", "OpaquePrompts": "https://python.langchain.com/v0.2/docs/integrations/llms/opaqueprompts/"}, "PromptLayerCallbackHandler": {"PromptLayer": "https://python.langchain.com/v0.2/docs/integrations/providers/promptlayer/"}, "StreamlitCallbackHandler": {"Streamlit": "https://python.langchain.com/v0.2/docs/integrations/providers/streamlit/", "GPT4All": "https://python.langchain.com/v0.2/docs/integrations/providers/gpt4all/"}, "UpTrainCallbackHandler": {"UpTrain": "https://python.langchain.com/v0.2/docs/integrations/providers/uptrain/"}, "TrubricsCallbackHandler": {"Trubrics": "https://python.langchain.com/v0.2/docs/integrations/providers/trubrics/"}, "InfinoCallbackHandler": {"Infino": "https://python.langchain.com/v0.2/docs/integrations/providers/infino/"}, "FigmaFileLoader": {"Figma": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/figma/"}, "Baseten": {"Baseten": "https://python.langchain.com/v0.2/docs/integrations/llms/baseten/"}, "WeatherDataLoader": {"Weather": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/weather/"}, "Tair": {"Tair": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tair/", "Alibaba Cloud": "https://python.langchain.com/v0.2/docs/integrations/providers/alibaba_cloud/"}, "CollegeConfidentialLoader": {"College Confidential": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/college_confidential/"}, "RWKV": {"RWKV-4": "https://python.langchain.com/v0.2/docs/integrations/providers/rwkv/"}, "LakeFSLoader": {"lakeFS": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/lakefs/"}, "FaunaLoader": {"Fauna": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/fauna/"}, "OCIGenAI": {"Oracle Cloud Infrastructure (OCI)": "https://python.langchain.com/v0.2/docs/integrations/providers/oci/", "# Oracle Cloud Infrastructure Generative AI": "https://python.langchain.com/v0.2/docs/integrations/llms/oci_generative_ai/"}, "OCIModelDeploymentVLLM": {"Oracle Cloud Infrastructure (OCI)": "https://python.langchain.com/v0.2/docs/integrations/providers/oci/", "OCI Data Science Model Deployment Endpoint": "https://python.langchain.com/v0.2/docs/integrations/llms/oci_model_deployment_endpoint/"}, "OCIModelDeploymentTGI": {"Oracle Cloud Infrastructure (OCI)": "https://python.langchain.com/v0.2/docs/integrations/providers/oci/", "OCI Data Science Model Deployment Endpoint": "https://python.langchain.com/v0.2/docs/integrations/llms/oci_model_deployment_endpoint/"}, "OracleDocLoader": {"OracleAI Vector Search": "https://python.langchain.com/v0.2/docs/integrations/providers/oracleai/", "Oracle AI Vector Search: Document Processing": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/oracleai/"}, "OracleTextSplitter": {"OracleAI Vector Search": "https://python.langchain.com/v0.2/docs/integrations/providers/oracleai/", "Oracle AI Vector Search: Document Processing": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/oracleai/"}, "OracleVS": {"OracleAI Vector Search": 
"https://python.langchain.com/v0.2/docs/integrations/providers/oracleai/", "Oracle AI Vector Search: Vector Store": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/oracle/"}, "Lantern": {"Lantern": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/lantern/"}, "Fireworks": {"Fireworks": "https://python.langchain.com/v0.2/docs/integrations/llms/fireworks/"}, "DropboxLoader": {"Dropbox": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/dropbox/"}, "ForefrontAI": {"ForefrontAI": "https://python.langchain.com/v0.2/docs/integrations/llms/forefrontai/"}, "CometCallbackHandler": {"Comet": "https://python.langchain.com/v0.2/docs/integrations/providers/comet_tracking/"}, "CTransformers": {"C Transformers": "https://python.langchain.com/v0.2/docs/integrations/llms/ctransformers/"}, "BiliBiliLoader": {"BiliBili": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/bilibili/"}, "TencentCOSDirectoryLoader": {"Tencent": "https://python.langchain.com/v0.2/docs/integrations/providers/tencent/", "Tencent COS Directory": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/tencent_cos_directory/"}, "TencentCOSFileLoader": {"Tencent": "https://python.langchain.com/v0.2/docs/integrations/providers/tencent/", "Tencent COS File": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/tencent_cos_file/"}, "OBSDirectoryLoader": {"Huawei": "https://python.langchain.com/v0.2/docs/integrations/providers/huawei/", "Huawei OBS Directory": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/huawei_obs_directory/"}, "OBSFileLoader": {"Huawei": "https://python.langchain.com/v0.2/docs/integrations/providers/huawei/", "Huawei OBS File": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/huawei_obs_file/"}, "DiffbotLoader": {"Diffbot": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/diffbot/"}, "DiffbotGraphTransformer": {"Diffbot": "https://python.langchain.com/v0.2/docs/integrations/graphs/diffbot/", "Neo4j": "https://python.langchain.com/v0.2/docs/integrations/providers/neo4j/"}, "DeepSparse": {"DeepSparse": "https://python.langchain.com/v0.2/docs/integrations/llms/deepsparse/"}, "AimCallbackHandler": {"Aim": "https://python.langchain.com/v0.2/docs/integrations/providers/aim_tracking/"}, "ModernTreasuryLoader": {"Modern Treasury": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/modern_treasury/"}, "GitHubIssuesLoader": {"GitHub": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/github/"}, "GithubFileLoader": {"GitHub": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/github/"}, "Banana": {"Banana": "https://python.langchain.com/v0.2/docs/integrations/llms/banana/"}, "InfinispanVS": {"Infinispan VS": "https://python.langchain.com/v0.2/docs/integrations/providers/infinispanvs/", "Infinispan": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/infinispanvs/"}, "CerebriumAI": {"CerebriumAI": "https://python.langchain.com/v0.2/docs/integrations/llms/cerebriumai/"}, "GutenbergLoader": {"Gutenberg": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/gutenberg/"}, "WikipediaLoader": {"Wikipedia": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/wikipedia/", "Diffbot": "https://python.langchain.com/v0.2/docs/integrations/graphs/diffbot/"}, "ConfluenceLoader": {"Confluence": 
"https://python.langchain.com/v0.2/docs/integrations/document_loaders/confluence/"}, "Predibase": {"Predibase": "https://python.langchain.com/v0.2/docs/integrations/llms/predibase/"}, "Beam": {"Beam": "https://python.langchain.com/v0.2/docs/integrations/llms/beam/"}, "GrobidParser": {"Grobid": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/grobid/"}, "Typesense": {"Typesense": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/typesense/"}, "Hologres": {"Hologres": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/hologres/", "Alibaba Cloud": "https://python.langchain.com/v0.2/docs/integrations/providers/alibaba_cloud/"}, "ArangoGraph": {"ArangoDB": "https://python.langchain.com/v0.2/docs/integrations/graphs/arangodb/"}, "ArangoGraphQAChain": {"ArangoDB": "https://python.langchain.com/v0.2/docs/integrations/graphs/arangodb/"}, "ArcGISLoader": {"ArcGIS": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/arcgis/"}, "WandbCallbackHandler": {"Weights & Biases": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracking/"}, "ObsidianLoader": {"Obsidian": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/obsidian/"}, "SQLDatabaseToolkit": {"CnosDB": "https://python.langchain.com/v0.2/docs/integrations/providers/cnosdb/", "SQL Database": "https://python.langchain.com/v0.2/docs/integrations/toolkits/sql_database/", "Build a Question/Answering system over SQL data": "https://python.langchain.com/v0.2/docs/tutorials/sql_qa/"}, "BrowserbaseLoader": {"Browserbase": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/browserbase/"}, "Nebula": {"Nebula": "https://python.langchain.com/v0.2/docs/integrations/providers/symblai_nebula/", "Nebula (Symbl.ai)": "https://python.langchain.com/v0.2/docs/integrations/llms/symblai_nebula/"}, "Writer": {"Writer": "https://python.langchain.com/v0.2/docs/integrations/llms/writer/"}, "BaichuanLLM": {"Baichuan": "https://python.langchain.com/v0.2/docs/integrations/providers/baichuan/", "Baichuan LLM": "https://python.langchain.com/v0.2/docs/integrations/llms/baichuan/"}, "ApacheDoris": {"Apache Doris": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/apache_doris/"}, "ZepCloudVectorStore": {"Zep": "https://python.langchain.com/v0.2/docs/integrations/providers/zep/", "Zep Cloud": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/zep_cloud/"}, "BrowserlessLoader": {"Browserless": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/browserless/"}, "AZLyricsLoader": {"AZLyrics": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/azlyrics/"}, "ToMarkdownLoader": {"2Markdown": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/tomarkdown/"}, "Mlflow": {"MLflow Deployments for LLMs": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow/"}, "MlflowEmbeddings": {"MLflow Deployments for LLMs": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow/"}, "ChatMlflow": {"MLflow Deployments for LLMs": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow/"}, "GitLoader": {"Git": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/git/"}, "MlflowAIGateway": {"MLflow AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_ai_gateway/"}, "MlflowAIGatewayEmbeddings": {"MLflow AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_ai_gateway/"}, 
"ChatMLflowAIGateway": {"MLflow AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_ai_gateway/"}, "Tigris": {"Tigris": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tigris/"}, "Meilisearch": {"Meilisearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/meilisearch/"}, "SQLDatabaseChain": {"Rebuff": "https://python.langchain.com/v0.2/docs/integrations/providers/rebuff/"}, "SnowflakeLoader": {"Snowflake": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/snowflake/"}, "CubeSemanticLoader": {"Cube": "https://python.langchain.com/v0.2/docs/integrations/providers/cube/", "Cube Semantic Layer": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/cube_semantic/"}, "Clickhouse": {"ClickHouse": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/clickhouse/"}, "ClickhouseSettings": {"ClickHouse": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/clickhouse/"}, "ChatDatabricks": {"-> content='Hello! How can I assist you today?'": "https://python.langchain.com/v0.2/docs/integrations/providers/databricks/", "Databricks": "https://python.langchain.com/v0.2/docs/integrations/llms/databricks/"}, "DatabricksEmbeddings": {"-> content='Hello! How can I assist you today?'": "https://python.langchain.com/v0.2/docs/integrations/providers/databricks/", "Databricks": "https://python.langchain.com/v0.2/docs/integrations/llms/databricks/"}, "TelegramChatFileLoader": {"Telegram": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/telegram/"}, "TelegramChatApiLoader": {"Telegram": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/telegram/"}, "PredictionGuard": {"Prediction Guard": "https://python.langchain.com/v0.2/docs/integrations/llms/predictionguard/"}, "NotionDirectoryLoader": {"Notion DB": "https://python.langchain.com/v0.2/docs/integrations/providers/notion/", "Notion DB 1/2": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/notion/"}, "NotionDBLoader": {"Notion DB": "https://python.langchain.com/v0.2/docs/integrations/providers/notion/", "Notion DB 2/2": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/notiondb/"}, "MWDumpLoader": {"MediaWikiDump": "https://python.langchain.com/v0.2/docs/integrations/providers/mediawikidump/", "MediaWiki Dump": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/mediawikidump/"}, "BraveSearchLoader": {"Brave Search": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/brave_search/"}, "StarRocks": {"StarRocks": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/starrocks/"}, "GooseAI": {"GooseAI": "https://python.langchain.com/v0.2/docs/integrations/llms/gooseai/"}, "DatadogLogsLoader": {"Datadog Logs": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/datadog_logs/"}, "ApifyDatasetLoader": {"Apify": "https://python.langchain.com/v0.2/docs/integrations/providers/apify/", "Apify Dataset": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/apify_dataset/"}, "NLPCloud": {"NLPCloud": "https://python.langchain.com/v0.2/docs/integrations/providers/nlpcloud/", "NLP Cloud": "https://python.langchain.com/v0.2/docs/integrations/llms/nlpcloud/"}, "Milvus": {"Milvus": "https://python.langchain.com/v0.2/docs/integrations/providers/milvus/", "Zilliz": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/zilliz/"}, "SemaDB": {"SemaDB": 
"https://python.langchain.com/v0.2/docs/integrations/vectorstores/semadb/"}, "GitbookLoader": {"GitBook": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/gitbook/"}, "Rockset": {"Rockset": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/rockset/"}, "RocksetLoader": {"Rockset": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/rockset/"}, "Minimax": {"Minimax": "https://python.langchain.com/v0.2/docs/integrations/llms/minimax/"}, "UnstructuredAPIFileIOLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/"}, "UnstructuredAPIFileLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "Unstructured File": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/unstructured_file/"}, "UnstructuredCHMLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/"}, "UnstructuredCSVLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "CSV": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/csv/"}, "UnstructuredEmailLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "Email": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/email/"}, "UnstructuredEPubLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "EPub ": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/epub/"}, "UnstructuredFileIOLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "Google Drive": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_drive/"}, "UnstructuredFileLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "Unstructured File": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/unstructured_file/"}, "UnstructuredImageLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "Images": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/image/"}, "UnstructuredODTLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "Open Document Format (ODT)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/odt/"}, "UnstructuredOrgModeLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "Org-mode": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/org_mode/"}, "UnstructuredRSTLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "RST": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/rst/"}, "UnstructuredRTFLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/"}, "UnstructuredTSVLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "TSV": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/tsv/"}, "UnstructuredURLLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "URL": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/url/"}, "UnstructuredXMLLoader": {"Unstructured": 
"https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "XML": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/xml/"}, "SelfHostedPipeline": {"Runhouse": "https://python.langchain.com/v0.2/docs/integrations/llms/runhouse/"}, "SelfHostedHuggingFaceLLM": {"Runhouse": "https://python.langchain.com/v0.2/docs/integrations/llms/runhouse/"}, "MlflowCallbackHandler": {"MLflow": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_tracking/"}, "SpreedlyLoader": {"Spreedly": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/spreedly/"}, "OpenLLM": {"OpenLLM": "https://python.langchain.com/v0.2/docs/integrations/llms/openllm/"}, "PubMedLoader": {"PubMed": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/pubmed/"}, "SearxSearchResults": {"SearxNG Search API": "https://python.langchain.com/v0.2/docs/integrations/providers/searx/"}, "Modal": {"Modal": "https://python.langchain.com/v0.2/docs/integrations/llms/modal/"}, "OpenCityDataLoader": {"Geopandas": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/geopandas/", "Open City Data": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/open_city_data/"}, "PGEmbedding": {"Postgres Embedding": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pgembedding/"}, "SQLiteVSS": {"SQLite": "https://python.langchain.com/v0.2/docs/integrations/providers/sqlite/", "SQLite-VSS": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sqlitevss/"}, "Xinference": {"Xorbits Inference (Xinference)": "https://python.langchain.com/v0.2/docs/integrations/llms/xinference/"}, "IFixitLoader": {"iFixit": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/ifixit/"}, "AlephAlpha": {"Aleph Alpha": "https://python.langchain.com/v0.2/docs/integrations/llms/aleph_alpha/"}, "PipelineAI": {"PipelineAI": "https://python.langchain.com/v0.2/docs/integrations/llms/pipelineai/"}, "FacebookChatLoader": {"Facebook - Meta": "https://python.langchain.com/v0.2/docs/integrations/providers/facebook/", "Facebook Chat": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/facebook_chat/"}, "Epsilla": {"Epsilla": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/epsilla/"}, "AwaDB": {"AwaDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/awadb/"}, "ArxivLoader": {"Arxiv": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/arxiv/"}, "BlockchainDocumentLoader": {"Alchemy": "https://python.langchain.com/v0.2/docs/integrations/providers/alchemy/", "Blockchain": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/blockchain/"}, "BlockchainType": {"Alchemy": "https://python.langchain.com/v0.2/docs/integrations/providers/alchemy/", "Blockchain": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/blockchain/"}, "Anyscale": {"Anyscale": "https://python.langchain.com/v0.2/docs/integrations/llms/anyscale/"}, "AINetworkToolkit": {"AINetwork": "https://python.langchain.com/v0.2/docs/integrations/toolkits/ainetwork/"}, "StripeLoader": {"Stripe": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/stripe/"}, "StochasticAI": {"StochasticAI": "https://python.langchain.com/v0.2/docs/integrations/llms/stochasticai/"}, "Bagel": {"BagelDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/bageldb/", "Bagel": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/bagel/"}, "TigerGraph": 
{"TigerGraph": "https://python.langchain.com/v0.2/docs/integrations/graphs/tigergraph/"}, "BlackboardLoader": {"Blackboard": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/blackboard/"}, "YandexGPT": {"Yandex": "https://python.langchain.com/v0.2/docs/integrations/providers/yandex/", "YandexGPT": "https://python.langchain.com/v0.2/docs/integrations/llms/yandex/"}, "UpstashVectorStore": {"Upstash Vector": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/upstash/"}, "NucliaTextTransformer": {"Nuclia": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/nuclia_transformer/"}, "NucliaLoader": {"Nuclia": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/nuclia/"}, "NucliaDB": {"Nuclia": "https://python.langchain.com/v0.2/docs/integrations/providers/nuclia/", "NucliaDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/nucliadb/"}, "AnalyticDB": {"AnalyticDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/analyticdb/", "Alibaba Cloud": "https://python.langchain.com/v0.2/docs/integrations/providers/alibaba_cloud/"}, "GoogleApiYoutubeLoader": {"YouTube": "https://python.langchain.com/v0.2/docs/integrations/providers/youtube/", "YouTube transcripts": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/youtube_transcript/"}, "PromptLayerOpenAI": {"PromptLayer": "https://python.langchain.com/v0.2/docs/integrations/providers/promptlayer/", "PromptLayer OpenAI": "https://python.langchain.com/v0.2/docs/integrations/llms/promptlayer_openai/"}, "USearch": {"USearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/usearch/"}, "EtherscanLoader": {"Etherscan": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/etherscan/"}, "Arcee": {"Arcee": "https://python.langchain.com/v0.2/docs/integrations/llms/arcee/"}, "WhyLabsCallbackHandler": {"WhyLabs": "https://python.langchain.com/v0.2/docs/integrations/providers/whylabs_profiling/"}, "IuguLoader": {"Iugu": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/iugu/"}, "CouchbaseLoader": {"Couchbase": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/couchbase/"}, "FlyteCallbackHandler": {"Flyte": "https://python.langchain.com/v0.2/docs/integrations/providers/flyte/"}, "wandb_tracing_enabled": {"WandB Tracing": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracing/"}, "ManifestWrapper": {"Hazy Research": "https://python.langchain.com/v0.2/docs/integrations/providers/hazy_research/", "Manifest": "https://python.langchain.com/v0.2/docs/integrations/llms/manifest/"}, "OntotextGraphDBGraph": {"Ontotext GraphDB": "https://python.langchain.com/v0.2/docs/integrations/graphs/ontotext/"}, "OntotextGraphDBQAChain": {"Ontotext GraphDB": "https://python.langchain.com/v0.2/docs/integrations/graphs/ontotext/"}, "Marqo": {"Marqo": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/marqo/"}, "IMSDbLoader": {"IMSDb": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/imsdb/"}, "TiDBLoader": {"TiDB": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/tidb/"}, "TiDBVectorStore": {"TiDB": "https://python.langchain.com/v0.2/docs/integrations/providers/tidb/", "TiDB Vector": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tidb_vector/"}, "DeepInfra": {"DeepInfra": "https://python.langchain.com/v0.2/docs/integrations/llms/deepinfra/"}, "RedditPostsLoader": {"Reddit": 
"https://python.langchain.com/v0.2/docs/integrations/document_loaders/reddit/"}, "TrelloLoader": {"Trello": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/trello/"}, "AtlasDB": {"Atlas": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/atlas/"}, "SKLearnVectorStore": {"scikit-learn": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sklearn/"}, "EverNoteLoader": {"EverNote": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/evernote/"}, "VDMS": {"VDMS": "https://python.langchain.com/v0.2/docs/integrations/providers/vdms/", "Intel's Visual Data Management System (VDMS)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vdms/"}, "VDMS_Client": {"VDMS": "https://python.langchain.com/v0.2/docs/integrations/providers/vdms/", "Intel's Visual Data Management System (VDMS)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vdms/"}, "TwitterTweetLoader": {"Twitter": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/twitter/"}, "DiscordChatLoader": {"Discord": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/discord/"}, "AssemblyAIAudioTranscriptLoader": {"AssemblyAI": "https://python.langchain.com/v0.2/docs/integrations/providers/assemblyai/", "AssemblyAI Audio Transcripts": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/assemblyai/"}, "KineticaLoader": {"Kinetica": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/kinetica/"}, "ClearMLCallbackHandler": {"ClearML": "https://python.langchain.com/v0.2/docs/integrations/providers/clearml_tracking/"}, "SlackDirectoryLoader": {"Slack": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/slack/"}, "HNLoader": {"Hacker News": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/hacker_news/", "Google Spanner": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_spanner/"}, "CTranslate2": {"CTranslate2": "https://python.langchain.com/v0.2/docs/integrations/llms/ctranslate2/"}, "QianfanLLMEndpoint": {"Baidu": "https://python.langchain.com/v0.2/docs/integrations/providers/baidu/", "Baidu Qianfan": "https://python.langchain.com/v0.2/docs/integrations/llms/baidu_qianfan_endpoint/"}, "BESVectorStore": {"Baidu": "https://python.langchain.com/v0.2/docs/integrations/providers/baidu/", "Baidu Cloud ElasticSearch VectorSearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/baiducloud_vector_search/"}, "BaiduVectorDB": {"Baidu": "https://python.langchain.com/v0.2/docs/integrations/providers/baidu/", "Baidu VectorDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/baiduvectordb/"}, "Aphrodite": {"PygmalionAI": "https://python.langchain.com/v0.2/docs/integrations/providers/pygmalionai/", "Aphrodite Engine": "https://python.langchain.com/v0.2/docs/integrations/llms/aphrodite/"}, "PaiEasEndpoint": {"Alibaba Cloud": "https://python.langchain.com/v0.2/docs/integrations/providers/alibaba_cloud/", "Alibaba Cloud PAI EAS": "https://python.langchain.com/v0.2/docs/integrations/llms/alibabacloud_pai_eas_endpoint/"}, "MaxComputeLoader": {"Alibaba Cloud": "https://python.langchain.com/v0.2/docs/integrations/providers/alibaba_cloud/", "Alibaba Cloud MaxCompute": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/alibaba_cloud_maxcompute/"}, "AlibabaCloudOpenSearch": {"Alibaba Cloud": "https://python.langchain.com/v0.2/docs/integrations/providers/alibaba_cloud/", "Alibaba Cloud 
OpenSearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/alibabacloud_opensearch/"}, "AlibabaCloudOpenSearchSettings": {"Alibaba Cloud": "https://python.langchain.com/v0.2/docs/integrations/providers/alibaba_cloud/", "Alibaba Cloud OpenSearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/alibabacloud_opensearch/"}, "DocusaurusLoader": {"Docusaurus": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/docusaurus/"}, "Annoy": {"Annoy": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/annoy/"}, "BibtexLoader": {"BibTeX": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/bibtex/"}, "CassandraLoader": {"Cassandra": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/cassandra/"}, "Vearch": {"Vearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vearch/"}, "JoplinLoader": {"Joplin": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/joplin/"}, "ArthurCallbackHandler": {"Arthur": "https://python.langchain.com/v0.2/docs/integrations/providers/arthur_tracking/"}, "AcreomLoader": {"Acreom": "https://python.langchain.com/v0.2/docs/integrations/providers/acreom/", "acreom": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/acreom/"}, "KDBAI": {"KDB.AI": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/kdbai/"}, "DuckDBLoader": {"DuckDB": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/duckdb/"}, "Petals": {"Petals": "https://python.langchain.com/v0.2/docs/integrations/llms/petals/"}, "MomentoVectorIndex": {"Momento": "https://python.langchain.com/v0.2/docs/integrations/providers/momento/", "Momento Vector Index (MVI)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/momento_vector_index/"}, "NIBittensorLLM": {"Bittensor": "https://python.langchain.com/v0.2/docs/integrations/llms/bittensor/"}, "AirtableLoader": {"Airtable": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airtable/"}, "LarkSuiteDocLoader": {"ByteDance": "https://python.langchain.com/v0.2/docs/integrations/providers/byte_dance/", "LarkSuite (FeiShu)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/larksuite/"}, "JavelinAIGateway": {"Javelin AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/javelin_ai_gateway/", "Javelin AI Gateway Tutorial": "https://python.langchain.com/v0.2/docs/integrations/llms/javelin/"}, "JavelinAIGatewayEmbeddings": {"Javelin AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/javelin_ai_gateway/", "Javelin AI Gateway Tutorial": "https://python.langchain.com/v0.2/docs/integrations/llms/javelin/"}, "ChatJavelinAIGateway": {"Javelin AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/javelin_ai_gateway/", "Javelin AI Gateway Tutorial": "https://python.langchain.com/v0.2/docs/integrations/llms/javelin/"}, "TensorflowDatasetLoader": {"TensorFlow Datasets": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/tensorflow_datasets/"}, "Clarifai": {"Clarifai": "https://python.langchain.com/v0.2/docs/integrations/llms/clarifai/"}, "DataheraldTextToSQL": {"Dataherald": "https://python.langchain.com/v0.2/docs/integrations/providers/dataherald/"}, "RoamLoader": {"Roam": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/roam/"}, "CONDENSE_QUESTION_PROMPT": {"Chat Over Documents with Vectara": 
"https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_chat/"}, "load_qa_with_sources_chain": {"Chat Over Documents with Vectara": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_chat/"}, "QA_PROMPT": {"Chat Over Documents with Vectara": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_chat/"}, "RedisStore": {"RedisStore": "https://python.langchain.com/v0.2/docs/integrations/stores/redis/"}, "AstraDBStore": {"Astra DB": "https://python.langchain.com/v0.2/docs/integrations/stores/astradb/"}, "AstraDBByteStore": {"Astra DB": "https://python.langchain.com/v0.2/docs/integrations/stores/astradb/"}, "CassandraByteStore": {"Cassandra": "https://python.langchain.com/v0.2/docs/integrations/stores/cassandra/"}, "UpstashRedisByteStore": {"UpstashRedisByteStore": "https://python.langchain.com/v0.2/docs/integrations/stores/upstash_redis/"}, "ConneryToolkit": {"Connery Toolkit": "https://python.langchain.com/v0.2/docs/integrations/toolkits/connery/"}, "create_csv_agent": {"CSV": "https://python.langchain.com/v0.2/docs/integrations/toolkits/csv/"}, "create_xorbits_agent": {"Xorbits": "https://python.langchain.com/v0.2/docs/integrations/toolkits/xorbits/"}, "JiraToolkit": {"Jira": "https://python.langchain.com/v0.2/docs/integrations/toolkits/jira/"}, "JiraAPIWrapper": {"Jira": "https://python.langchain.com/v0.2/docs/integrations/toolkits/jira/"}, "create_spark_dataframe_agent": {"Spark Dataframe": "https://python.langchain.com/v0.2/docs/integrations/toolkits/spark/"}, "PythonREPLTool": {"Python": "https://python.langchain.com/v0.2/docs/integrations/toolkits/python/"}, "create_pbi_agent": {"PowerBI Dataset": "https://python.langchain.com/v0.2/docs/integrations/toolkits/powerbi/"}, "AzureCognitiveServicesToolkit": {"Azure Cognitive Services": "https://python.langchain.com/v0.2/docs/integrations/toolkits/azure_cognitive_services/"}, "NLAToolkit": {"Natural Language APIs": "https://python.langchain.com/v0.2/docs/integrations/toolkits/openapi_nla/"}, "Requests": {"Natural Language APIs": "https://python.langchain.com/v0.2/docs/integrations/toolkits/openapi_nla/"}, "GmailToolkit": {"Gmail": "https://python.langchain.com/v0.2/docs/integrations/toolkits/gmail/"}, "build_resource_service": {"Gmail": "https://python.langchain.com/v0.2/docs/integrations/toolkits/gmail/"}, "get_gmail_credentials": {"Gmail": "https://python.langchain.com/v0.2/docs/integrations/toolkits/gmail/"}, "SlackToolkit": {"Slack": "https://python.langchain.com/v0.2/docs/integrations/toolkits/slack/"}, "SteamToolkit": {"Steam Game Recommendation & Game Details": "https://python.langchain.com/v0.2/docs/integrations/toolkits/steam/"}, "SteamWebAPIWrapper": {"Steam Game Recommendation & Game Details": "https://python.langchain.com/v0.2/docs/integrations/toolkits/steam/"}, "CassandraDatabaseToolkit": {"Cassandra Database": "https://python.langchain.com/v0.2/docs/integrations/toolkits/cassandra_database/"}, "GetSchemaCassandraDatabaseTool": {"Cassandra Database": "https://python.langchain.com/v0.2/docs/integrations/toolkits/cassandra_database/"}, "GetTableDataCassandraDatabaseTool": {"Cassandra Database": "https://python.langchain.com/v0.2/docs/integrations/toolkits/cassandra_database/"}, "QueryCassandraDatabaseTool": {"Cassandra Database": "https://python.langchain.com/v0.2/docs/integrations/toolkits/cassandra_database/"}, "CassandraDatabase": {"Cassandra Database": "https://python.langchain.com/v0.2/docs/integrations/toolkits/cassandra_database/"}, "JsonToolkit": 
{"JSON": "https://python.langchain.com/v0.2/docs/integrations/toolkits/json/"}, "create_json_agent": {"JSON": "https://python.langchain.com/v0.2/docs/integrations/toolkits/json/"}, "JsonSpec": {"JSON": "https://python.langchain.com/v0.2/docs/integrations/toolkits/json/", "OpenAPI": "https://python.langchain.com/v0.2/docs/integrations/toolkits/openapi/"}, "AirbyteStripeLoader": {"Airbyte Question Answering": "https://python.langchain.com/v0.2/docs/integrations/toolkits/airbyte_structured_qa/", "Airbyte Stripe (Deprecated)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_stripe/"}, "GitHubToolkit": {"Github": "https://python.langchain.com/v0.2/docs/integrations/toolkits/github/"}, "GitHubAPIWrapper": {"Github": "https://python.langchain.com/v0.2/docs/integrations/toolkits/github/"}, "ConversationSummaryBufferMemory": {"Github": "https://python.langchain.com/v0.2/docs/integrations/toolkits/github/"}, "tracing_v2_enabled": {"Github": "https://python.langchain.com/v0.2/docs/integrations/toolkits/github/", "Chat Bot Feedback Template": "https://python.langchain.com/v0.2/docs/templates/chat-bot-feedback/"}, "render_text_description_and_args": {"Github": "https://python.langchain.com/v0.2/docs/integrations/toolkits/github/", "Amadeus": "https://python.langchain.com/v0.2/docs/integrations/toolkits/amadeus/"}, "ClickupToolkit": {"ClickUp": "https://python.langchain.com/v0.2/docs/integrations/toolkits/clickup/"}, "ClickupAPIWrapper": {"ClickUp": "https://python.langchain.com/v0.2/docs/integrations/toolkits/clickup/"}, "SparkSQLToolkit": {"Spark SQL": "https://python.langchain.com/v0.2/docs/integrations/toolkits/spark_sql/"}, "create_spark_sql_agent": {"Spark SQL": "https://python.langchain.com/v0.2/docs/integrations/toolkits/spark_sql/"}, "SparkSQL": {"Spark SQL": "https://python.langchain.com/v0.2/docs/integrations/toolkits/spark_sql/"}, "PlayWrightBrowserToolkit": {"PlayWright Browser": "https://python.langchain.com/v0.2/docs/integrations/toolkits/playwright/"}, "create_async_playwright_browser": {"PlayWright Browser": "https://python.langchain.com/v0.2/docs/integrations/toolkits/playwright/"}, "create_conversational_retrieval_agent": {"# Cogniswitch Tools": "https://python.langchain.com/v0.2/docs/integrations/toolkits/cogniswitch/"}, "CogniswitchToolkit": {"# Cogniswitch Tools": "https://python.langchain.com/v0.2/docs/integrations/toolkits/cogniswitch/"}, "NasaToolkit": {"NASA": "https://python.langchain.com/v0.2/docs/integrations/toolkits/nasa/"}, "NasaAPIWrapper": {"NASA": "https://python.langchain.com/v0.2/docs/integrations/toolkits/nasa/"}, "MultionToolkit": {"MultiOn": "https://python.langchain.com/v0.2/docs/integrations/toolkits/multion/"}, "AmadeusToolkit": {"Amadeus": "https://python.langchain.com/v0.2/docs/integrations/toolkits/amadeus/"}, "HuggingFaceHub": {"Amadeus": "https://python.langchain.com/v0.2/docs/integrations/toolkits/amadeus/"}, "AzureAiServicesToolkit": {"Azure AI Services": "https://python.langchain.com/v0.2/docs/integrations/toolkits/azure_ai_services/"}, "create_structured_chat_agent": {"Azure AI Services": "https://python.langchain.com/v0.2/docs/integrations/toolkits/azure_ai_services/"}, "reduce_openapi_spec": {"OpenAPI": "https://python.langchain.com/v0.2/docs/integrations/toolkits/openapi/"}, "RequestsWrapper": {"OpenAPI": "https://python.langchain.com/v0.2/docs/integrations/toolkits/openapi/"}, "OpenAPIToolkit": {"OpenAPI": "https://python.langchain.com/v0.2/docs/integrations/toolkits/openapi/"}, "create_openapi_agent": {"OpenAPI": 
"https://python.langchain.com/v0.2/docs/integrations/toolkits/openapi/"}, "GitLabToolkit": {"Gitlab": "https://python.langchain.com/v0.2/docs/integrations/toolkits/gitlab/"}, "GitLabAPIWrapper": {"Gitlab": "https://python.langchain.com/v0.2/docs/integrations/toolkits/gitlab/"}, "PolygonToolkit": {"Polygon IO Toolkit": "https://python.langchain.com/v0.2/docs/integrations/toolkits/polygon/"}, "ApacheDorisSettings": {"Apache Doris": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/apache_doris/"}, "DistanceStrategy": {"Kinetica Vectorstore API": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/kinetica/", "SAP HANA Cloud Vector Engine": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sap_hanavector/", "SingleStoreDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/singlestoredb/", "Oracle AI Vector Search: Vector Store": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/oracle/", "Google BigQuery Vector Search": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_bigquery_vector_search/", "SemaDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/semadb/"}, "SentenceTransformerEmbeddings": {"SQLite-VSS": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sqlitevss/", "Vespa": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vespa/", "Chroma": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/chroma/"}, "Vald": {"Vald": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vald/"}, "RetrievalQAWithSourcesChain": {"Weaviate": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/weaviate/", "Yellowbrick": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/yellowbrick/", "Jaguar Vector Database": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/jaguar/", "Neo4j Vector Index": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/neo4jvector/", "Marqo": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/marqo/", "Psychic": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/psychic/"}, "Yellowbrick": {"Yellowbrick": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/yellowbrick/"}, "LLMRails": {"LLMRails": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/llm_rails/"}, "HanaDB": {"SAP HANA Cloud Vector Engine": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sap_hanavector/"}, "VectorSearchVectorStoreDatastore": {"Google Vertex AI Vector Search": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_vertex_ai_vector_search/"}, "VertexAI": {"Google Vertex AI Vector Search": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_vertex_ai_vector_search/", "Google Cloud Vertex AI Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/google_cloud_vertexai_rerank/", "Google Cloud Vertex AI": "https://python.langchain.com/v0.2/docs/integrations/llms/google_vertex_ai_palm/"}, "Hippo": {"Hippo": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/hippo/"}, "RedisText": {"Redis": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/redis/"}, "RedisNum": {"Redis": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/redis/"}, "RedisTag": {"Redis": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/redis/"}, "RedisFilter": {"Redis": 
"https://python.langchain.com/v0.2/docs/integrations/vectorstores/redis/"}, "VespaStore": {"Vespa": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vespa/"}, "NeuralDBVectorStore": {"ThirdAI NeuralDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/thirdai_neuraldb/"}, "VikingDB": {"viking DB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vikingdb/"}, "VikingDBConfig": {"viking DB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vikingdb/"}, "CouchbaseVectorStore": {"Couchbase ": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/couchbase/"}, "Relyt": {"Relyt": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/relyt/"}, "oraclevs": {"Oracle AI Vector Search: Vector Store": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/oracle/"}, "VLite": {"vlite": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vlite/"}, "DuckDB": {"DuckDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/duckdb/"}, "StarRocksSettings": {"StarRocks": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/starrocks/"}, "PathwayVectorClient": {"Pathway": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pathway/"}, "DocArrayHnswSearch": {"DocArray HnswSearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/docarray_hnsw/"}, "TileDB": {"TileDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tiledb/"}, "EcloudESVectorStore": {"China Mobile ECloud ElasticSearch VectorSearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/ecloud_vector_search/"}, "SurrealDBStore": {"SurrealDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/surrealdb/"}, "ManticoreSearch": {"ManticoreSearch VectorStore": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/manticore_search/"}, "ManticoreSearchSettings": {"ManticoreSearch VectorStore": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/manticore_search/"}, "Aerospike": {"Aerospike": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/aerospike/"}, "ElasticVectorSearch": {"Elasticsearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/elasticsearch/"}, "PGVecto_rs": {"PGVecto.rs": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pgvecto_rs/"}, "BigQueryVectorSearch": {"Google BigQuery Vector Search": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_bigquery_vector_search/"}, "ZepVectorStore": {"Zep": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/zep/"}, "CollectionConfig": {"Zep": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/zep/"}, "openai": {"OpenAI Adapter(Old)": "https://python.langchain.com/v0.2/docs/integrations/adapters/openai-old/", "OpenAI Adapter": "https://python.langchain.com/v0.2/docs/integrations/adapters/openai/"}, "RankLLMRerank": {"RankLLM Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/rankllm-reranker/"}, "AsyncChromiumLoader": {"Beautiful Soup": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/beautiful_soup/", "Async Chromium": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/async_chromium/"}, "BeautifulSoupTransformer": {"Beautiful Soup": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/beautiful_soup/"}, "OpenVINOReranker": {"OpenVINO Reranker": 
"https://python.langchain.com/v0.2/docs/integrations/document_transformers/openvino_rerank/"}, "create_metadata_tagger": {"OpenAI metadata tagger": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/openai_metadata_tagger/"}, "DoctranPropertyExtractor": {"Doctran: extract properties": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/doctran_extract_properties/"}, "DoctranQATransformer": {"Doctran: interrogate documents": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/doctran_interrogate_document/"}, "CrossEncoderReranker": {"Cross Encoder Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/cross_encoder_reranker/"}, "HuggingFaceCrossEncoder": {"Cross Encoder Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/cross_encoder_reranker/"}, "JinaRerank": {"Jina Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/jina_rerank/"}, "DoctranTextTranslator": {"Doctran: language translation": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/doctran_translate_document/"}, "MarkdownifyTransformer": {"Markdownify": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/markdownify/"}, "XorbitsLoader": {"Xorbits Pandas DataFrame": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/xorbits/"}, "OutlookMessageLoader": {"Email": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/email/"}, "TranscriptFormat": {"AssemblyAI Audio Transcripts": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/assemblyai/"}, "AirbyteSalesforceLoader": {"Airbyte Salesforce (Deprecated)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_salesforce/"}, "AirbyteCDKLoader": {"Airbyte CDK (Deprecated)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_cdk/"}, "Docx2txtLoader": {"Microsoft Word": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/microsoft_word/"}, "RSpaceLoader": {"# replace these ids with some from your own research notes.": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/rspace/"}, "SeleniumURLLoader": {"URL": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/url/"}, "PlaywrightURLLoader": {"URL": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/url/"}, "AirbyteJSONLoader": {"Airbyte JSON (Deprecated)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_json/"}, "GeoDataFrameLoader": {"Geopandas": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/geopandas/"}, "AirbyteTypeformLoader": {"Airbyte Typeform (Deprecated)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_typeform/"}, "MHTMLLoader": {"mhtml": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/mhtml/"}, "SpiderLoader": {"Spider": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/spider/"}, "NewsURLLoader": {"News URL": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/news/"}, "ImageCaptionLoader": {"Image captions": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/image_captions/"}, "LLMSherpaFileLoader": {"LLM Sherpa": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/llmsherpa/"}, "ScrapflyLoader": {"# ScrapFly": 
"https://python.langchain.com/v0.2/docs/integrations/document_loaders/scrapfly/"}, "TomlLoader": {"TOML": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/toml/"}, "PsychicLoader": {"Psychic": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/psychic/"}, "FireCrawlLoader": {"FireCrawl": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/firecrawl/"}, "LarkSuiteWikiLoader": {"LarkSuite (FeiShu)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/larksuite/"}, "FakeListLLM": {"LarkSuite (FeiShu)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/larksuite/"}, "MergedDataLoader": {"Merge Documents Loader": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/merge_doc/"}, "RecursiveUrlLoader": {"Recursive URL": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/recursive_url/"}, "AirbyteHubspotLoader": {"Airbyte Hubspot (Deprecated)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_hubspot/"}, "AirbyteGongLoader": {"Airbyte Gong (Deprecated)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_gong/"}, "AstraDBLoader": {"AstraDB": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/astradb/"}, "ReadTheDocsLoader": {"ReadTheDocs Documentation": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/readthedocs_documentation/"}, "PolarsDataFrameLoader": {"Polars DataFrame": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/polars_dataframe/"}, "DataFrameLoader": {"Pandas DataFrame": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/pandas_dataframe/"}, "SurrealDBLoader": {"SurrealDB": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/surrealdb/"}, "GoogleApiClient": {"YouTube transcripts": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/youtube_transcript/"}, "ConcurrentLoader": {"Concurrent Loader": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/concurrent/"}, "RSSFeedLoader": {"RSS Feeds": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/rss/"}, "PebbloSafeLoader": {"Pebblo Safe DocumentLoader": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/pebblo/"}, "VsdxLoader": {"Vsdx": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/vsdx/"}, "NotebookLoader": {"Jupyter Notebook": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/jupyter_notebook/"}, "OracleAutonomousDatabaseLoader": {"Oracle Autonomous Database": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/oracleadb_loader/"}, "LanguageParser": {"Source Code": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/source_code/"}, "SRTLoader": {"Subtitle": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/subtitle/"}, "MastodonTootsLoader": {"Mastodon": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/mastodon/"}, "AirbyteShopifyLoader": {"Airbyte Shopify (Deprecated)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_shopify/"}, "GlueCatalogLoader": {"Glue Catalog": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/glue_catalog/"}, "PySparkDataFrameLoader": {"PySpark": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/pyspark_dataframe/"}, "AirbyteZendeskSupportLoader": {"Airbyte Zendesk Support 
(Deprecated)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_zendesk_support/"}, "CoNLLULoader": {"CoNLL-U": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/conll-u/"}, "MongodbLoader": {"MongoDB": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/mongodb/"}, "SitemapLoader": {"Sitemap": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/sitemap/"}, "YuqueLoader": {"Yuque": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/yuque/"}, "QuipLoader": {"Quip": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/quip/"}, "MemgraphGraph": {"Memgraph": "https://python.langchain.com/v0.2/docs/integrations/graphs/memgraph/"}, "GraphSparqlQAChain": {"RDFLib": "https://python.langchain.com/v0.2/docs/integrations/graphs/rdflib_sparql/"}, "RdfGraph": {"RDFLib": "https://python.langchain.com/v0.2/docs/integrations/graphs/rdflib_sparql/"}, "NebulaGraphQAChain": {"NebulaGraph": "https://python.langchain.com/v0.2/docs/integrations/graphs/nebula_graph/"}, "NebulaGraph": {"NebulaGraph": "https://python.langchain.com/v0.2/docs/integrations/graphs/nebula_graph/"}, "GremlinQAChain": {"Azure Cosmos DB for Apache Gremlin": "https://python.langchain.com/v0.2/docs/integrations/graphs/azure_cosmosdb_gremlin/"}, "GraphIndexCreator": {"NetworkX": "https://python.langchain.com/v0.2/docs/integrations/graphs/networkx/"}, "GraphQAChain": {"NetworkX": "https://python.langchain.com/v0.2/docs/integrations/graphs/networkx/"}, "NetworkxEntityGraph": {"NetworkX": "https://python.langchain.com/v0.2/docs/integrations/graphs/networkx/"}, "HugeGraphQAChain": {"HugeGraph": "https://python.langchain.com/v0.2/docs/integrations/graphs/hugegraph/"}, "HugeGraph": {"HugeGraph": "https://python.langchain.com/v0.2/docs/integrations/graphs/hugegraph/"}, "AGEGraph": {"Apache AGE": "https://python.langchain.com/v0.2/docs/integrations/graphs/apache_age/"}, "KuzuQAChain": {"Kuzu": "https://python.langchain.com/v0.2/docs/integrations/graphs/kuzu_db/"}, "KuzuGraph": {"Kuzu": "https://python.langchain.com/v0.2/docs/integrations/graphs/kuzu_db/"}, "FalkorDBQAChain": {"FalkorDB": "https://python.langchain.com/v0.2/docs/integrations/graphs/falkordb/"}, "FalkorDBGraph": {"FalkorDB": "https://python.langchain.com/v0.2/docs/integrations/graphs/falkordb/"}, "ConversationBufferWindowMemory": {"Baseten": "https://python.langchain.com/v0.2/docs/integrations/llms/baseten/", "OpaquePrompts": "https://python.langchain.com/v0.2/docs/integrations/llms/opaqueprompts/"}, "Solar": {"Solar": "https://python.langchain.com/v0.2/docs/integrations/llms/solar/"}, "IpexLLM": {"IPEX-LLM": "https://python.langchain.com/v0.2/docs/integrations/llms/ipex_llm/"}, "SagemakerEndpoint": {"SageMakerEndpoint": "https://python.langchain.com/v0.2/docs/integrations/llms/sagemaker/"}, "LLMContentHandler": {"SageMakerEndpoint": "https://python.langchain.com/v0.2/docs/integrations/llms/sagemaker/"}, "OctoAIEndpoint": {"OctoAI": "https://python.langchain.com/v0.2/docs/integrations/llms/octoai/"}, "TextGen": {"TextGen": "https://python.langchain.com/v0.2/docs/integrations/llms/textgen/"}, "MosaicML": {"MosaicML": "https://python.langchain.com/v0.2/docs/integrations/llms/mosaicml/"}, "VolcEngineMaasLLM": {"Volc Engine Maas": "https://python.langchain.com/v0.2/docs/integrations/llms/volcengine_maas/"}, "KoboldApiLLM": {"KoboldAI API": "https://python.langchain.com/v0.2/docs/integrations/llms/koboldai/"}, "Konko": {"Konko": 
"https://python.langchain.com/v0.2/docs/integrations/llms/konko/"}, "GemmaChatVertexAIModelGarden": {"Google Cloud Vertex AI": "https://python.langchain.com/v0.2/docs/integrations/llms/google_vertex_ai_palm/"}, "GemmaVertexAIModelGarden": {"Google Cloud Vertex AI": "https://python.langchain.com/v0.2/docs/integrations/llms/google_vertex_ai_palm/"}, "ChatAnthropicVertex": {"Google Cloud Vertex AI": "https://python.langchain.com/v0.2/docs/integrations/llms/google_vertex_ai_palm/"}, "OpaquePrompts": {"OpaquePrompts": "https://python.langchain.com/v0.2/docs/integrations/llms/opaqueprompts/"}, "TitanTakeoff": {"Titan Takeoff": "https://python.langchain.com/v0.2/docs/integrations/llms/titan_takeoff/"}, "Friendli": {"Friendli": "https://python.langchain.com/v0.2/docs/integrations/llms/friendli/"}, "Databricks": {"Databricks": "https://python.langchain.com/v0.2/docs/integrations/llms/databricks/"}, "LMFormatEnforcer": {"LM Format Enforcer": "https://python.langchain.com/v0.2/docs/integrations/llms/lmformatenforcer_experimental/"}, "VLLM": {"vLLM": "https://python.langchain.com/v0.2/docs/integrations/llms/vllm/"}, "VLLMOpenAI": {"vLLM": "https://python.langchain.com/v0.2/docs/integrations/llms/vllm/"}, "CustomOpenAIContentFormatter": {"Azure ML": "https://python.langchain.com/v0.2/docs/integrations/llms/azure_ml/"}, "ContentFormatterBase": {"Azure ML": "https://python.langchain.com/v0.2/docs/integrations/llms/azure_ml/"}, "DollyContentFormatter": {"Azure ML": "https://python.langchain.com/v0.2/docs/integrations/llms/azure_ml/"}, "load_llm": {"Azure ML": "https://python.langchain.com/v0.2/docs/integrations/llms/azure_ml/"}, "MapReduceChain": {"Manifest": "https://python.langchain.com/v0.2/docs/integrations/llms/manifest/"}, "ModelLaboratory": {"Manifest": "https://python.langchain.com/v0.2/docs/integrations/llms/manifest/"}, "ExLlamaV2": {"ExLlamaV2": "https://python.langchain.com/v0.2/docs/integrations/llms/exllamav2/"}, "RELLM": {"RELLM": "https://python.langchain.com/v0.2/docs/integrations/llms/rellm_experimental/"}, "Yuan2": {"Yuan2.0": "https://python.langchain.com/v0.2/docs/integrations/llms/yuan2/"}, "SparkLLM": {"SparkLLM": "https://python.langchain.com/v0.2/docs/integrations/llms/sparkllm/"}, "Moonshot": {"MoonshotChat": "https://python.langchain.com/v0.2/docs/integrations/llms/moonshot/"}, "OpenLM": {"OpenLM": "https://python.langchain.com/v0.2/docs/integrations/llms/openlm/"}, "CloudflareWorkersAI": {"Cloudflare Workers AI": "https://python.langchain.com/v0.2/docs/integrations/llms/cloudflare_workersai/"}, "ChatGLM3": {"ChatGLM": "https://python.langchain.com/v0.2/docs/integrations/llms/chatglm/"}, "ChatGLM": {"ChatGLM": "https://python.langchain.com/v0.2/docs/integrations/llms/chatglm/"}, "Sambaverse": {"SambaNova": "https://python.langchain.com/v0.2/docs/integrations/llms/sambanova/"}, "SambaStudio": {"SambaNova": "https://python.langchain.com/v0.2/docs/integrations/llms/sambanova/"}, "LayerupSecurity": {"Layerup Security": "https://python.langchain.com/v0.2/docs/integrations/llms/layerup_security/"}, "JsonFormer": {"JSONFormer": "https://python.langchain.com/v0.2/docs/integrations/llms/jsonformer_experimental/"}, "WeightOnlyQuantPipeline": {"Intel Weight-Only Quantization": "https://python.langchain.com/v0.2/docs/integrations/llms/weight_only_quantization/"}, "Replicate": {"Replicate": "https://python.langchain.com/v0.2/docs/integrations/llms/replicate/"}, "RunnablePick": {"Build a Local RAG Application": "https://python.langchain.com/v0.2/docs/tutorials/local_rag/"}, 
"StuffDocumentsChain": {"Summarize Text": "https://python.langchain.com/v0.2/docs/tutorials/summarization/"}, "MapReduceDocumentsChain": {"Summarize Text": "https://python.langchain.com/v0.2/docs/tutorials/summarization/"}, "ReduceDocumentsChain": {"Summarize Text": "https://python.langchain.com/v0.2/docs/tutorials/summarization/"}, "AnalyzeDocumentChain": {"Summarize Text": "https://python.langchain.com/v0.2/docs/tutorials/summarization/"}, "QuerySQLDataBaseTool": {"Build a Question/Answering system over SQL data": "https://python.langchain.com/v0.2/docs/tutorials/sql_qa/"}, "OPENAI_TEMPLATE": {"Generate Synthetic Data": "https://python.langchain.com/v0.2/docs/tutorials/data_generation/"}, "create_openai_data_generator": {"Generate Synthetic Data": "https://python.langchain.com/v0.2/docs/tutorials/data_generation/"}, "DatasetGenerator": {"Generate Synthetic Data": "https://python.langchain.com/v0.2/docs/tutorials/data_generation/"}, "create_data_generation_chain": {"Generate Synthetic Data": "https://python.langchain.com/v0.2/docs/tutorials/data_generation/"}, "create_extraction_chain_pydantic": {"Generate Synthetic Data": "https://python.langchain.com/v0.2/docs/tutorials/data_generation/"}} \ No newline at end of file +{"ChatPromptTemplate": {"\ud83e\udd9c\ufe0f\ud83c\udfd3 LangServe": "https://python.langchain.com/v0.2/docs/langserve/", "\ud83e\udd9c\ud83d\udd78\ufe0fLangGraph": "https://python.langchain.com/v0.2/docs/langgraph/", "This uses some example data": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/imessage/", "Wait for the fine-tuning to complete (this may take some time)": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/langsmith_llm_runs/", "del os.environ['NVIDIA_API_KEY'] ## delete key and reset": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/nvidia_ai_endpoints/", "For use in Chaining section": "https://python.langchain.com/v0.2/docs/integrations/retrievers/you-retriever/", "fleet_context.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/fleet_context/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/tool_error_handling/", "# activeloop token is needed if you are not signed in using CLI: `activeloop login -u -p `": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "ragatouille.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/ragatouille/", "redis_chat_message_history.md": "https://python.langchain.com/v0.2/docs/integrations/memory/redis_chat_message_history/", "# Automatically restart kernel after installs so that your environment can access the new packages": "https://python.langchain.com/v0.2/docs/integrations/memory/google_sql_mssql/", "Optionally, specify your own session_state key for storing messages": "https://python.langchain.com/v0.2/docs/integrations/memory/streamlit_chat_message_history/", "copy from tidb cloud console": "https://python.langchain.com/v0.2/docs/integrations/memory/tidb_chat_message_history/", "Install Langchain community and core packages": "https://python.langchain.com/v0.2/docs/integrations/chat/kinetica/", "open ../../../static/img/brand/wordmark.png as base64 str": "https://python.langchain.com/v0.2/docs/integrations/chat/anthropic/", "groq.md": "https://python.langchain.com/v0.2/docs/integrations/chat/groq/", "openai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/openai/", "for running these examples in the notebook:": 
"https://python.langchain.com/v0.2/docs/integrations/chat/google_vertex_ai_palm/", "get a chat completion from the formatted messages": "https://python.langchain.com/v0.2/docs/integrations/chat/vllm/", "LangChain supports many other chat models. Here, we're using Ollama": "https://python.langchain.com/v0.2/docs/integrations/chat/ollama/", "If api_key is not passed, default behavior is to use the `MISTRAL_API_KEY` environment variable.": "https://python.langchain.com/v0.2/docs/integrations/chat/mistralai/", "ai21.md": "https://python.langchain.com/v0.2/docs/integrations/chat/ai21/", "!pip3 install text-generation": "https://python.langchain.com/v0.2/docs/integrations/chat/llama2_chat/", "edenai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/edenai/", "yuan2.md": "https://python.langchain.com/v0.2/docs/integrations/chat/yuan2/", "Loading the COMVEST 2024 notice": "https://python.langchain.com/v0.2/docs/integrations/chat/maritalk/", "perplexity.md": "https://python.langchain.com/v0.2/docs/integrations/chat/perplexity/", "using chat invoke": "https://python.langchain.com/v0.2/docs/integrations/chat/upstage/", "Or via the async API": "https://python.langchain.com/v0.2/docs/integrations/chat/nvidia_ai_endpoints/", "context.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/context/", "Fiddler project and model names, used for model registration": "https://python.langchain.com/v0.2/docs/integrations/callbacks/fiddler/", "1. Vanilla RAG {#vanilla-rag-1}": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "Cohere": "https://python.langchain.com/v0.2/docs/integrations/providers/cohere/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_summary/", "sql_database.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/sql_database/", "install package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/weaviate/", "Install all needed libraries": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/yellowbrick/", "cosine: distance metric": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/jaguar/", "astradb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/astradb/", "cassandra.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/cassandra/", "Must be an OpenAI model that supports functions": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/openai_metadata_tagger/", "see https://python.langchain.com/en/latest/modules/data_connection/getting_started.html for more details": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/figma/", "Quickstart": "https://python.langchain.com/v0.2/docs/modules/model_io/.ipynb_checkpoints/quick_start-checkpoint/", "Note that we set max_retries = 0 to avoid retrying on RateLimits, etc": "https://python.langchain.com/v0.2/docs/guides/productionization/fallbacks/", "moderation.md": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/moderation/", "QA with private data protection {#qa-with-private-data-protection}": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/qa_privacy_protection/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/chatbots/memory_management/", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/v0.2/docs/use_cases/code_understanding/", "If you'd like to use 
LangSmith, uncomment the below:": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/prompting/", "This will only get documents for Ankush": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/per_user/", "import dotenv": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/streaming/", "Uncomment if you want to log to LangSmith": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/citations/", "%pip install -qU langchain langchain-community langchain-openai youtube-transcript-api pytube langchain-chroma": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/quickstart/", "%pip install -qU langchain langchain-community langchain-openai langchain-chroma": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/no_queries/", "%pip install -qU langchain-core langchain-openai": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/techniques/step_back/", "%pip install -qU langchain langchain-community langchain-openai faker langchain-chroma": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/high_cardinality/", "%pip install -qU langchain langchain-openai": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/techniques/hyde/", "Optional, uncomment to trace runs with LangSmith. Sign up here: https://smith.langchain.com.": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/techniques/routing/", "%pip install -qU langchain langchain-openai youtube-transcript-api pytube": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/techniques/structuring/", "Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/sql/query_checking/", "Install a model capable of tool calling": "https://python.langchain.com/v0.2/docs/use_cases/extraction/quickstart/", "Define a custom prompt to provide instructions and any additional context.": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/examples/", "Download the content": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/handle_long_text/", "Set up a parser": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/parse/", "Using LangSmith is recommended but not required. 
Uncomment below lines to use.": "https://python.langchain.com/v0.2/docs/use_cases/sql/csv/", "os.environ[\"OPENAI_API_KEY\"] = getpass.getpass()": "https://python.langchain.com/v0.2/docs/use_cases/sql/large_db/", "Retrievers": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/index/", "The vectorstore to use to index the child chunks": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/multi_vector/", "[Beta] Memory": "https://python.langchain.com/v0.2/docs/modules/memory/.ipynb_checkpoints/index-checkpoint/", "adding_memory.md": "https://python.langchain.com/v0.2/docs/modules/memory/adding_memory/", "Construct the Tools agent": "https://python.langchain.com/v0.2/docs/modules/agents/agent_types/tool_calling/", "Load in document to retrieve over": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/agent_structured/", "Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/streaming/", "custom_agent.md": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/custom_agent/", "Model I/O": "https://python.langchain.com/v0.2/docs/modules/model_io/index/", "!pip install -qU langchain-openai": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/token_usage_tracking/", "Tool calling {#tool-calling}": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/function_calling/", "Prompts": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/.ipynb_checkpoints/prompts-checkpoint/", "openai_functions.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/openai_functions/", "openai_tools.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/openai_tools/", "This is a prompt template used to format each individual example.": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/few_shot_examples_chat/", "Quick reference {#quick-reference}": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/quick_start/", "Prompt templates": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/prompt_templates/.ipynb_checkpoints/index-checkpoint/", "custom_llm.md": "https://python.langchain.com/v0.2/docs/modules/model_io/llms/custom_llm/", "Streaming With LangChain {#streaming-with-langchain}": "https://python.langchain.com/v0.2/docs/expression_language/streaming/", "batch_configurable_chain([\"ice cream\", \"spaghetti\", \"dumplings\"])": "https://python.langchain.com/v0.2/docs/expression_language/why/", "> ChatPromptValue(messages=[HumanMessage(content='tell me a short joke about ice cream')])": "https://python.langchain.com/v0.2/docs/expression_language/get_started/", "The input schema of the chain is the input schema of its first part, the prompt.": "https://python.langchain.com/v0.2/docs/expression_language/interface/", "prompt_llm_parser.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/prompt_llm_parser/", "prompt_size.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/prompt_size/", "multiple_chains.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/multiple_chains/", "code_writing.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/code_writing/", "inspect.md": "https://python.langchain.com/v0.2/docs/expression_language/how_to/inspect/", "Remembers": "https://python.langchain.com/v0.2/docs/expression_language/how_to/message_history/", "decorator.md": 
"https://python.langchain.com/v0.2/docs/expression_language/how_to/decorator/", "Adding values to chain state {#adding-values-to-chain-state}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/assign/", "Binding: Attach runtime args {#binding-attach-runtime-args}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/binding/", "Formatting inputs & output {#formatting-inputs-output}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/parallel/", "Passing data through {#passing-data-through}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/passthrough/", "Run custom functions {#run-custom-functions}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/functions/", "Chaining runnables {#chaining-runnables}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/sequence/"}, "ChatAnthropic": {"\ud83e\udd9c\ufe0f\ud83c\udfd3 LangServe": "https://python.langchain.com/v0.2/docs/langserve/", "open ../../../static/img/brand/wordmark.png as base64 str": "https://python.langchain.com/v0.2/docs/integrations/chat/anthropic/", "Log10": "https://python.langchain.com/v0.2/docs/integrations/providers/log10/", "Define the neural network": "https://python.langchain.com/v0.2/docs/integrations/toolkits/python/", "If this is your first time using playwright, you'll have to install a browser executable.": "https://python.langchain.com/v0.2/docs/integrations/toolkits/playwright/", "Quickstart": "https://python.langchain.com/v0.2/docs/modules/model_io/quick_start/", "Note that we set max_retries = 0 to avoid retrying on RateLimits, etc": "https://python.langchain.com/v0.2/docs/guides/productionization/fallbacks/", "ANTHROPIC_API_KEY=": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/trajectory/trajectory_eval/", "The prompt was assigned to the evaluator": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/comparison/pairwise_string/", "This is equivalent to loading using the enum": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/string/criteria_eval_chain/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/multiple_tools/", "Set up a parser": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/parse/", "Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/agent_types/xml_agent/", "Model I/O": "https://python.langchain.com/v0.2/docs/modules/model_io/index/", "!pip install -qU langchain-openai": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/token_usage_tracking/", "streaming.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/streaming/", "structured_output.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/structured_output/", "response_metadata.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/response_metadata/", "The [bool] desribes a parameterization of a generic.": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/custom/", "xml.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/xml/", "This is a prompt template used to format each individual example.": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/few_shot_examples_chat/", "Streaming With LangChain {#streaming-with-langchain}": "https://python.langchain.com/v0.2/docs/expression_language/streaming/", 
"batch_configurable_chain([\"ice cream\", \"spaghetti\", \"dumplings\"])": "https://python.langchain.com/v0.2/docs/expression_language/why/", "Dynamically route logic based on input {#dynamically-route-logic-based-on-input}": "https://python.langchain.com/v0.2/docs/expression_language/how_to/routing/", "Configure chain internals at runtime {#configure-chain-internals-at-runtime}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/configure/", "Chaining runnables {#chaining-runnables}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/sequence/"}, "ChatOpenAI": {"\ud83e\udd9c\ufe0f\ud83c\udfd3 LangServe": "https://python.langchain.com/v0.2/docs/langserve/", "\ud83e\udd9c\ud83d\udd78\ufe0fLangGraph": "https://python.langchain.com/v0.2/docs/langgraph/", "Used by the agent in this tutorial": "https://python.langchain.com/v0.2/docs/langsmith/walkthrough/", "Merge consecutive messages from the same sender into a single message": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/discord/", "This uses some example data": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/imessage/", "Wait for the fine-tuning to complete (this may take some time)": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/langsmith_dataset/", "re_phrase.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/re_phrase/", "For use in Chaining section": "https://python.langchain.com/v0.2/docs/integrations/tools/you/", "fleet_context.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/fleet_context/", "Helper function for printing docs": "https://python.langchain.com/v0.2/docs/integrations/retrievers/llmlingua/", "outline.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/outline/", "get a token: https://platform.openai.com/account/api-keys": "https://python.langchain.com/v0.2/docs/integrations/retrievers/arxiv/", "Setup API keys for Kay and OpenAI": "https://python.langchain.com/v0.2/docs/integrations/retrievers/sec_filings/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/integrations/toolkits/polygon/", "# activeloop token is needed if you are not signed in using CLI: `activeloop login -u -p `": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "ragatouille.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/ragatouille/", "Setup API key": "https://python.langchain.com/v0.2/docs/integrations/retrievers/kay/", "OR (depending on Python version)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/flashrank-reranker/", "This example only specifies a relevant query": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/tencentvectordb/", "OpenAI": "https://python.langchain.com/v0.2/docs/integrations/platforms/openai/", "chatgpt_plugins.md": "https://python.langchain.com/v0.2/docs/integrations/tools/chatgpt_plugins/", "Specify your Connery Runner credentials.": "https://python.langchain.com/v0.2/docs/integrations/toolkits/connery/", "How to use it inside an Agent {#how-to-use-it-inside-an-agent}": "https://python.langchain.com/v0.2/docs/integrations/tools/infobip/", "Artifacts are charts created by matplotlib when `plt.show()` is called": "https://python.langchain.com/v0.2/docs/integrations/tools/e2b_data_analysis/", "Answer with 'Zhu'": "https://python.langchain.com/v0.2/docs/integrations/tools/human_tools/", "How YahooFinanceNewsTool works? 
{#how-yahoofinancenewstool-works}": "https://python.langchain.com/v0.2/docs/integrations/tools/yahoo_finance_news/", "start by installing semanticscholar api": "https://python.langchain.com/v0.2/docs/integrations/tools/semanticscholar/", "Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/max_iterations/", "Extract pdf content": "https://python.langchain.com/v0.2/docs/integrations/tools/bearly/", "arxiv.md": "https://python.langchain.com/v0.2/docs/integrations/tools/arxiv/", "and some deps for this notebook": "https://python.langchain.com/v0.2/docs/integrations/tools/exa_search/", "Adapted code from /docs/modules/agents/how_to/sharedmemory_for_tools": "https://python.langchain.com/v0.2/docs/integrations/tools/reddit_search/", "bash.md": "https://python.langchain.com/v0.2/docs/integrations/tools/bash/", "redis_chat_message_history.md": "https://python.langchain.com/v0.2/docs/integrations/memory/redis_chat_message_history/", "xata_chat_message_history.md": "https://python.langchain.com/v0.2/docs/integrations/memory/xata_chat_message_history/", "Remembrall": "https://python.langchain.com/v0.2/docs/integrations/memory/remembrall/", "Optionally, specify your own session_state key for storing messages": "https://python.langchain.com/v0.2/docs/integrations/memory/streamlit_chat_message_history/", "copy from tidb cloud console": "https://python.langchain.com/v0.2/docs/integrations/memory/tidb_chat_message_history/", "openai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/openai/", "get a chat completion from the formatted messages": "https://python.langchain.com/v0.2/docs/integrations/chat/vllm/", "LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/callbacks/llmonitor/", "context.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/context/", "labelstudio.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/labelstudio/", "promptlayer.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/promptlayer/", "1. 
Vanilla RAG {#vanilla-rag-1}": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "trubrics.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/trubrics/", "Install necessary dependencies.": "https://python.langchain.com/v0.2/docs/integrations/callbacks/infino/", "CnosDB": "https://python.langchain.com/v0.2/docs/integrations/providers/cnosdb/", "Log10": "https://python.langchain.com/v0.2/docs/integrations/providers/log10/", "Flyte": "https://python.langchain.com/v0.2/docs/integrations/providers/flyte/", "arthur_tracking.md": "https://python.langchain.com/v0.2/docs/integrations/providers/arthur_tracking/", "Dataherald": "https://python.langchain.com/v0.2/docs/integrations/providers/.ipynb_checkpoints/dataherald-checkpoint/", "Construct the OpenAI Tools agent": "https://python.langchain.com/v0.2/docs/integrations/providers/portkey/logging_tracing_portkey/", "Portkey": "https://python.langchain.com/v0.2/docs/integrations/providers/portkey/index/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/momento_vector_index/", "Create a dataframe": "https://python.langchain.com/v0.2/docs/integrations/toolkits/csv/", "document_comparison_toolkit.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/document_comparison_toolkit/", "Define the neural network": "https://python.langchain.com/v0.2/docs/integrations/toolkits/python/", "fictional example": "https://python.langchain.com/v0.2/docs/integrations/toolkits/powerbi/", "sql_database.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/sql_database/", "Set environmental variables here": "https://python.langchain.com/v0.2/docs/integrations/toolkits/amadeus/", "airbyte_structured_qa.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/airbyte_structured_qa/", "Set your environment variables using os.environ": "https://python.langchain.com/v0.2/docs/integrations/toolkits/github/", "Note, you can also connect to Spark via Spark connect. For example:": "https://python.langchain.com/v0.2/docs/integrations/toolkits/spark_sql/", "IMPORTANT: If you plan to use this account in the future, make sure to save the": "https://python.langchain.com/v0.2/docs/integrations/toolkits/ainetwork/", "cogniswitch.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/cogniswitch/", "pandas.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/pandas/", "Install package": "https://python.langchain.com/v0.2/docs/integrations/toolkits/robocorp/", "Authorize connection to your Browser extention": "https://python.langchain.com/v0.2/docs/integrations/toolkits/multion/", "NOTE: In this example. 
We must set `allow_dangerous_request=True` to enable the OpenAPI Agent to automatically use the Request Tool.": "https://python.langchain.com/v0.2/docs/integrations/toolkits/openapi/", "install package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/weaviate/", "Install all needed libraries": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/yellowbrick/", "cosine: distance metric": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/jaguar/", "Pip install necessary package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/neo4jvector/", "openai": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/hippo/", "astradb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/astradb/", "cassandra.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/cassandra/", "Clean up KDB.AI \"documents\" table and index for similarity search": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/kdbai/", "Pip install necessary packages": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/timescalevector/", "Must be an OpenAI model that supports functions": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/openai_metadata_tagger/", "set a flag to switch between local and remote parsing": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/youtube_audio/", "see https://python.langchain.com/en/latest/modules/data_connection/getting_started.html for more details": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/figma/", "Creating and executing the seeding query": "https://python.langchain.com/v0.2/docs/integrations/graphs/memgraph/", "rdflib_sparql.md": "https://python.langchain.com/v0.2/docs/integrations/graphs/rdflib_sparql/", "connect ngql jupyter extension to nebulagraph": "https://python.langchain.com/v0.2/docs/integrations/graphs/nebula_graph/", "graph.refresh_schema()": "https://python.langchain.com/v0.2/docs/integrations/graphs/kuzu_db/", "diffbot.md": "https://python.langchain.com/v0.2/docs/integrations/graphs/diffbot/", "feeding the schema using a user construct query": "https://python.langchain.com/v0.2/docs/integrations/graphs/ontotext/", "How many people played in Top Gun?": "https://python.langchain.com/v0.2/docs/integrations/graphs/neo4j_cypher/", "Instantiate ArangoDB Database": "https://python.langchain.com/v0.2/docs/integrations/graphs/arangodb/", "amazon_neptune_open_cypher.md": "https://python.langchain.com/v0.2/docs/integrations/graphs/amazon_neptune_open_cypher/", "falkordb.md": "https://python.langchain.com/v0.2/docs/integrations/graphs/falkordb/", "Quickstart": "https://python.langchain.com/v0.2/docs/modules/model_io/.ipynb_checkpoints/quick_start-checkpoint/", "Debugging": "https://python.langchain.com/v0.2/docs/guides/development/debugging/", "Note that we set max_retries = 0 to avoid retrying on RateLimits, etc": "https://python.langchain.com/v0.2/docs/guides/productionization/fallbacks/", "QA with private data protection {#qa-with-private-data-protection}": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/qa_privacy_protection/", "Reversible data anonymization with Microsoft Presidio {#reversible-data-anonymization-with-microsoft-presidio}": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/reversible/", "Download model": 
"https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/index/", "Initialize the language model": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/examples/comparisons/", "ANTHROPIC_API_KEY=": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/trajectory/trajectory_eval/", "custom.md": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/trajectory/custom/", "Correct": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/string/scoring_eval_chain/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/chatbots/memory_management/", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/v0.2/docs/use_cases/code_understanding/", "If you'd like to use LangSmith, uncomment the below:": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/prompting/", "This will only get documents for Ankush": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/per_user/", "import dotenv": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/streaming/", "conversational_retrieval_agents.md": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/conversational_retrieval_agents/", "Uncomment if you want to log to LangSmith": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/citations/", "%pip install -qU langchain langchain-community langchain-openai youtube-transcript-api pytube langchain-chroma": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/quickstart/", "%pip install -qU langchain langchain-community langchain-openai langchain-chroma": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/no_queries/", "%pip install -qU langchain-core langchain-openai": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/techniques/step_back/", "%pip install -qU langchain langchain-community langchain-openai faker langchain-chroma": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/high_cardinality/", "%pip install -qU langchain langchain-openai": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/techniques/hyde/", "Optional, uncomment to trace runs with LangSmith. Sign up here: https://smith.langchain.com.": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/techniques/routing/", "%pip install -qU langchain langchain-openai youtube-transcript-api pytube": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/techniques/structuring/", "Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/sql/prompting/", "Install a model capable of tool calling": "https://python.langchain.com/v0.2/docs/use_cases/extraction/quickstart/", "Define a custom prompt to provide instructions and any additional context.": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/examples/", "Download the content": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/handle_long_text/", "Using LangSmith is recommended but not required. 
Uncomment below lines to use.": "https://python.langchain.com/v0.2/docs/use_cases/sql/csv/", "os.environ[\"OPENAI_API_KEY\"] = getpass.getpass()": "https://python.langchain.com/v0.2/docs/use_cases/sql/large_db/", "Retrievers": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/index/", "The vectorstore to use to index the child chunks": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/multi_vector/", "Build a sample vectorDB": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/MultiQueryRetriever/", "This example only specifies a filter": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/self_query/", "tools_as_openai_functions.md": "https://python.langchain.com/v0.2/docs/modules/tools/tools_as_openai_functions/", "[Beta] Memory": "https://python.langchain.com/v0.2/docs/modules/memory/.ipynb_checkpoints/index-checkpoint/", "adding_memory.md": "https://python.langchain.com/v0.2/docs/modules/memory/adding_memory/", "To enable streaming, we pass in `streaming=True` to the ChatModel constructor": "https://python.langchain.com/v0.2/docs/modules/callbacks/async_callbacks/", "Quickstart {#quickstart}": "https://python.langchain.com/v0.2/docs/modules/agents/quick_start/", "Load in document to retrieve over": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/agent_structured/", "pip install wikipedia": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/intermediate_steps/", "need to use GPT-4 here as GPT-3.5 does not understand, however hard you insist, that": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/agent_iter/", "custom_agent.md": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/custom_agent/", "Model I/O": "https://python.langchain.com/v0.2/docs/modules/model_io/index/", "!pip install -qU langchain-openai": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/token_usage_tracking/", "structured_output.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/structured_output/", "logprobs.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/logprobs/", "response_metadata.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/response_metadata/", "structured.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/structured/", "csv.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/csv/", "Define your desired data structure.": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/pydantic/", "retry.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/retry/", "enum.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/enum/", "openai_functions.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/openai_functions/", "Solely for documentation purposes.": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/pandas_dataframe/", "output_fixing.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/output_fixing/", "openai_tools.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/openai_tools/", "composition.md": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/composition/", "Prompt templates": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/prompt_templates/.ipynb_checkpoints/index-checkpoint/", 
"batch_configurable_chain([\"ice cream\", \"spaghetti\", \"dumplings\"])": "https://python.langchain.com/v0.2/docs/expression_language/why/", "The input schema of the chain is the input schema of its first part, the prompt.": "https://python.langchain.com/v0.2/docs/expression_language/interface/", "prompt_llm_parser.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/prompt_llm_parser/", "prompt_size.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/prompt_size/", "multiple_chains.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/multiple_chains/", "code_writing.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/code_writing/", "inspect.md": "https://python.langchain.com/v0.2/docs/expression_language/how_to/inspect/", "Remembers": "https://python.langchain.com/v0.2/docs/expression_language/how_to/message_history/", "decorator.md": "https://python.langchain.com/v0.2/docs/expression_language/how_to/decorator/", "Adding values to chain state {#adding-values-to-chain-state}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/assign/", "Binding: Attach runtime args {#binding-attach-runtime-args}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/binding/", "Formatting inputs & output {#formatting-inputs-output}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/parallel/", "Passing data through {#passing-data-through}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/passthrough/", "Configure chain internals at runtime {#configure-chain-internals-at-runtime}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/configure/", "Run custom functions {#run-custom-functions}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/functions/"}, "SystemMessage": {"\ud83e\udd9c\ufe0f\ud83c\udfd3 LangServe": "https://python.langchain.com/v0.2/docs/langserve/", "# activeloop token is needed if you are not signed in using CLI: `activeloop login -u -p `": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "and some deps for this notebook": "https://python.langchain.com/v0.2/docs/integrations/tools/exa_search/", "setup tools": "https://python.langchain.com/v0.2/docs/integrations/chat/huggingface/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/chat/fireworks/", "octoai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/octoai/", "service url": "https://python.langchain.com/v0.2/docs/integrations/chat/llama_edge/", "Note that each chunk may contain more than one \"token\"": "https://python.langchain.com/v0.2/docs/integrations/chat/google_generative_ai/", "Konko {#konko}": "https://python.langchain.com/v0.2/docs/integrations/chat/konko/", "openai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/openai/", "gigachat.md": "https://python.langchain.com/v0.2/docs/integrations/chat/gigachat/", "get a chat completion from the formatted messages": "https://python.langchain.com/v0.2/docs/integrations/chat/vllm/", "Let\u2019s try out LLAMA model offered on EverlyAI Hosted Endpoints {#lets-try-out-llama-model-offered-on-everlyai-hosted-endpoints}": "https://python.langchain.com/v0.2/docs/integrations/chat/everlyai/", "friendli.md": "https://python.langchain.com/v0.2/docs/integrations/chat/friendli/", "zhipuai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/zhipuai/", "!pip3 install text-generation": 
"https://python.langchain.com/v0.2/docs/integrations/chat/llama2_chat/", "yuan2.md": "https://python.langchain.com/v0.2/docs/integrations/chat/yuan2/", "Install the package": "https://python.langchain.com/v0.2/docs/integrations/chat/tongyi/", "Generate your api key from: https://platform.moonshot.cn/console/api-keys": "https://python.langchain.com/v0.2/docs/integrations/chat/moonshot/", "First step is to set up the env variable.": "https://python.langchain.com/v0.2/docs/integrations/chat/premai/", "Let\u2019s try out each model offered on Anyscale Endpoints {#lets-try-out-each-model-offered-on-anyscale-endpoints}": "https://python.langchain.com/v0.2/docs/integrations/chat/anyscale/", "yandex.md": "https://python.langchain.com/v0.2/docs/integrations/chat/yandex/", "LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/callbacks/llmonitor/", "context.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/context/", "labelstudio.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/labelstudio/", "trubrics.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/trubrics/", "MLflow Deployments for LLMs": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow/", "MLflow AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_ai_gateway/", "PremAI": "https://python.langchain.com/v0.2/docs/integrations/providers/premai/", "Javelin AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/javelin_ai_gateway/", "sql_database.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/sql_database/", "Install package": "https://python.langchain.com/v0.2/docs/integrations/toolkits/robocorp/", "Step 4: Embeddings Example {#step-4-embeddings-example}": "https://python.langchain.com/v0.2/docs/integrations/llms/javelin/", "%pip install -qU langchain-core langchain-openai": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/few_shot/", "%pip install -qU langchain langchain-openai": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/techniques/decomposition/", "Define a custom prompt to provide instructions and any additional context.": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/examples/", "adding_memory.md": "https://python.langchain.com/v0.2/docs/modules/memory/adding_memory/", "custom_chat_model.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/custom_chat_model/", "Quickstart {#quickstart}": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/quick_start/", "composition.md": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/composition/", "Quick reference {#quick-reference}": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/quick_start/"}, "HumanMessage": {"\ud83e\udd9c\ufe0f\ud83c\udfd3 LangServe": "https://python.langchain.com/v0.2/docs/langserve/", "\ud83e\udd9c\ud83d\udd78\ufe0fLangGraph": "https://python.langchain.com/v0.2/docs/langgraph/", "Merge consecutive messages from the same sender into a single message": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/discord/", "Set this to your Zep server URL": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory/", "# activeloop token is needed if you are not signed in using CLI: `activeloop login -u -p `": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "setup tools": 
"https://python.langchain.com/v0.2/docs/integrations/chat/mlx/", "azureml_chat_endpoint.md": "https://python.langchain.com/v0.2/docs/integrations/chat/azureml_chat_endpoint/", "alibaba_cloud_pai_eas.md": "https://python.langchain.com/v0.2/docs/integrations/chat/alibaba_cloud_pai_eas/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/chat/fireworks/", "octoai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/octoai/", "get a new token: https://deepinfra.com/login?from=%2Fdash": "https://python.langchain.com/v0.2/docs/integrations/chat/deepinfra/", "open ../../../static/img/brand/wordmark.png as base64 str": "https://python.langchain.com/v0.2/docs/integrations/chat/anthropic/", "litellm.md": "https://python.langchain.com/v0.2/docs/integrations/chat/litellm/", "service url": "https://python.langchain.com/v0.2/docs/integrations/chat/llama_edge/", "Note that each chunk may contain more than one \"token\"": "https://python.langchain.com/v0.2/docs/integrations/chat/google_generative_ai/", "Schema": "https://python.langchain.com/v0.2/docs/integrations/chat/ollama_functions/", "Install the package": "https://python.langchain.com/v0.2/docs/integrations/chat/tongyi/", "Konko {#konko}": "https://python.langchain.com/v0.2/docs/integrations/chat/konko/", "openai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/openai/", "for running these examples in the notebook:": "https://python.langchain.com/v0.2/docs/integrations/chat/google_vertex_ai_palm/", "bedrock.md": "https://python.langchain.com/v0.2/docs/integrations/chat/bedrock/", "gigachat.md": "https://python.langchain.com/v0.2/docs/integrations/chat/gigachat/", "get a chat completion from the formatted messages": "https://python.langchain.com/v0.2/docs/integrations/chat/vllm/", "LangChain supports many other chat models. 
Here, we're using Ollama": "https://python.langchain.com/v0.2/docs/integrations/chat/ollama/", "azure_chat_openai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/azure_chat_openai/", "Let\u2019s try out LLAMA model offered on EverlyAI Hosted Endpoints {#lets-try-out-llama-model-offered-on-everlyai-hosted-endpoints}": "https://python.langchain.com/v0.2/docs/integrations/chat/everlyai/", "gpt_router.md": "https://python.langchain.com/v0.2/docs/integrations/chat/gpt_router/", "litellm_router.md": "https://python.langchain.com/v0.2/docs/integrations/chat/litellm_router/", "friendli.md": "https://python.langchain.com/v0.2/docs/integrations/chat/friendli/", "If api_key is not passed, default behavior is to use the `MISTRAL_API_KEY` environment variable.": "https://python.langchain.com/v0.2/docs/integrations/chat/mistralai/", "zhipuai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/zhipuai/", "baichuan.md": "https://python.langchain.com/v0.2/docs/integrations/chat/baichuan/", "baidu_qianfan_endpoint.md": "https://python.langchain.com/v0.2/docs/integrations/chat/baidu_qianfan_endpoint/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/tool_error_handling/", "edenai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/edenai/", "ernie.md": "https://python.langchain.com/v0.2/docs/integrations/chat/ernie/", "tencent_hunyuan.md": "https://python.langchain.com/v0.2/docs/integrations/chat/tencent_hunyuan/", "minimax.md": "https://python.langchain.com/v0.2/docs/integrations/chat/minimax/", "yuan2.md": "https://python.langchain.com/v0.2/docs/integrations/chat/yuan2/", "promptlayer_chatopenai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/promptlayer_chatopenai/", "sparkllm.md": "https://python.langchain.com/v0.2/docs/integrations/chat/sparkllm/", "Generate your api key from: https://platform.moonshot.cn/console/api-keys": "https://python.langchain.com/v0.2/docs/integrations/chat/moonshot/", "dappier.md": "https://python.langchain.com/v0.2/docs/integrations/chat/dappier/", "First step is to set up the env variable.": "https://python.langchain.com/v0.2/docs/integrations/chat/premai/", "Let\u2019s try out each model offered on Anyscale Endpoints {#lets-try-out-each-model-offered-on-anyscale-endpoints}": "https://python.langchain.com/v0.2/docs/integrations/chat/anyscale/", "yandex.md": "https://python.langchain.com/v0.2/docs/integrations/chat/yandex/", "Or via the async API": "https://python.langchain.com/v0.2/docs/integrations/chat/nvidia_ai_endpoints/", "LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/callbacks/llmonitor/", "context.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/context/", "labelstudio.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/labelstudio/", "promptlayer.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/promptlayer/", "trubrics.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/trubrics/", "Log10": "https://python.langchain.com/v0.2/docs/integrations/providers/log10/", "MLflow Deployments for LLMs": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow/", "MLflow AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_ai_gateway/", "-> content='Hello! 
How can I assist you today?'": "https://python.langchain.com/v0.2/docs/integrations/providers/databricks/", "Flyte": "https://python.langchain.com/v0.2/docs/integrations/providers/flyte/", "PremAI": "https://python.langchain.com/v0.2/docs/integrations/providers/premai/", "Cohere": "https://python.langchain.com/v0.2/docs/integrations/providers/cohere/", "arthur_tracking.md": "https://python.langchain.com/v0.2/docs/integrations/providers/arthur_tracking/", "Javelin AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/javelin_ai_gateway/", "google_vertex_ai_palm.md": "https://python.langchain.com/v0.2/docs/integrations/llms/google_vertex_ai_palm/", "If running a Databricks notebook attached to an interactive cluster in \"single user\"": "https://python.langchain.com/v0.2/docs/integrations/llms/databricks/", "Step 4: Embeddings Example {#step-4-embeddings-example}": "https://python.langchain.com/v0.2/docs/integrations/llms/javelin/", "azure_ml.md": "https://python.langchain.com/v0.2/docs/integrations/llms/azure_ml/", "Quickstart": "https://python.langchain.com/v0.2/docs/modules/model_io/.ipynb_checkpoints/quick_start-checkpoint/", "Chat Bot Feedback Template": "https://python.langchain.com/v0.2/docs/templates/chat-bot-feedback/", "import dotenv": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/streaming/", "%pip install -qU langchain-core langchain-openai": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/few_shot/", "%pip install -qU langchain langchain-openai": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/techniques/decomposition/", "Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/graph/semantic/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/chatbots/memory_management/", "Define a custom prompt to provide instructions and any additional context.": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/examples/", "tools_as_openai_functions.md": "https://python.langchain.com/v0.2/docs/modules/tools/tools_as_openai_functions/", "To enable streaming, we pass in `streaming=True` to the ChatModel constructor": "https://python.langchain.com/v0.2/docs/modules/callbacks/async_callbacks/", "Quickstart {#quickstart}": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/quick_start/", "Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/agent_types/openai_tools/", "Construct the Tools agent": "https://python.langchain.com/v0.2/docs/modules/agents/agent_types/tool_calling/", "custom_agent.md": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/custom_agent/", "Model I/O": "https://python.langchain.com/v0.2/docs/modules/model_io/index/", "Tool calling {#tool-calling}": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/function_calling/", "custom_chat_model.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/custom_chat_model/", "composition.md": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/composition/", "Quick reference {#quick-reference}": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/quick_start/", "Remembers": "https://python.langchain.com/v0.2/docs/expression_language/how_to/message_history/"}, "RunnableMap": {"\ud83e\udd9c\ufe0f\ud83c\udfd3 LangServe": "https://python.langchain.com/v0.2/docs/langserve/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = 
\"true\"": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/parallel/"}, "RunnableLambda": {"\ud83e\udd9c\ufe0f\ud83c\udfd3 LangServe": "https://python.langchain.com/v0.2/docs/langserve/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_summary/", "QA with private data protection {#qa-with-private-data-protection}": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/qa_privacy_protection/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/parallel/", "This will only get documents for Ankush": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/per_user/", "Uncomment if you want to log to LangSmith": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/citations/", "Download the content": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/handle_long_text/", "retry.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/retry/", "Streaming With LangChain {#streaming-with-langchain}": "https://python.langchain.com/v0.2/docs/expression_language/streaming/", "Dynamically route logic based on input {#dynamically-route-logic-based-on-input}": "https://python.langchain.com/v0.2/docs/expression_language/how_to/routing/", "Run custom functions {#run-custom-functions}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/functions/"}, "MessagesPlaceholder": {"\ud83e\udd9c\ud83d\udd78\ufe0fLangGraph": "https://python.langchain.com/v0.2/docs/langgraph/", "redis_chat_message_history.md": "https://python.langchain.com/v0.2/docs/integrations/memory/redis_chat_message_history/", "# Automatically restart kernel after installs so that your environment can access the new packages": "https://python.langchain.com/v0.2/docs/integrations/memory/google_sql_mssql/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/tool_error_handling/", "Optionally, specify your own session_state key for storing messages": "https://python.langchain.com/v0.2/docs/integrations/memory/streamlit_chat_message_history/", "copy from tidb cloud console": "https://python.langchain.com/v0.2/docs/integrations/memory/tidb_chat_message_history/", "!pip3 install text-generation": "https://python.langchain.com/v0.2/docs/integrations/chat/llama2_chat/", "sql_database.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/sql_database/", "Set your environment variables using os.environ": "https://python.langchain.com/v0.2/docs/integrations/toolkits/github/", "Quickstart": "https://python.langchain.com/v0.2/docs/get_started/quickstart/", "import dotenv": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/streaming/", "Uncomment if you want to log to LangSmith": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/citations/", "%pip install -qU langchain-core langchain-openai": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/few_shot/", "%pip install -qU langchain langchain-openai": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/techniques/decomposition/", "Uncomment the below to use LangSmith. 
Not required.": "https://python.langchain.com/v0.2/docs/use_cases/sql/agents/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/chatbots/memory_management/", "Install a model capable of tool calling": "https://python.langchain.com/v0.2/docs/use_cases/extraction/quickstart/", "Define a custom prompt to provide instructions and any additional context.": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/examples/", "Download the content": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/handle_long_text/", "[Beta] Memory": "https://python.langchain.com/v0.2/docs/modules/memory/.ipynb_checkpoints/index-checkpoint/", "adding_memory.md": "https://python.langchain.com/v0.2/docs/modules/memory/adding_memory/", "Load in document to retrieve over": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/agent_structured/", "custom_agent.md": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/custom_agent/", "Quick reference {#quick-reference}": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/quick_start/", "prompt_size.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/prompt_size/", "Remembers": "https://python.langchain.com/v0.2/docs/expression_language/how_to/message_history/"}, "ToolMessage": {"\ud83e\udd9c\ud83d\udd78\ufe0fLangGraph": "https://python.langchain.com/v0.2/docs/langgraph/", "open ../../../static/img/brand/wordmark.png as base64 str": "https://python.langchain.com/v0.2/docs/integrations/chat/anthropic/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/tool_error_handling/", "%pip install -qU langchain-core langchain-openai": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/few_shot/", "%pip install -qU langchain langchain-openai": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/techniques/decomposition/", "Define a custom prompt to provide instructions and any additional context.": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/examples/", "Tool calling {#tool-calling}": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/function_calling/", "custom_chat_model.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/custom_chat_model/"}, "tool": {"\ud83e\udd9c\ud83d\udd78\ufe0fLangGraph": "https://python.langchain.com/v0.2/docs/langgraph/", "and some deps for this notebook": "https://python.langchain.com/v0.2/docs/integrations/tools/exa_search/", "LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/callbacks/llmonitor/", "Construct the OpenAI Tools agent": "https://python.langchain.com/v0.2/docs/integrations/providers/portkey/logging_tracing_portkey/", "Portkey": "https://python.langchain.com/v0.2/docs/integrations/providers/portkey/index/", "jsonformer_experimental.md": "https://python.langchain.com/v0.2/docs/integrations/llms/jsonformer_experimental/", "ANTHROPIC_API_KEY=": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/trajectory/trajectory_eval/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/agents/", "If you'd like to use LangSmith, uncomment the below:": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/prompting/", "Import things that are needed generically": "https://python.langchain.com/v0.2/docs/modules/tools/custom_tools/", "Get the prompt to use - you can modify 
this!": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/streaming/", "custom_agent.md": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/custom_agent/", "Tool calling {#tool-calling}": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/function_calling/", "Streaming With LangChain {#streaming-with-langchain}": "https://python.langchain.com/v0.2/docs/expression_language/streaming/"}, "convert_to_openai_tool": {"\ud83e\udd9c\ud83d\udd78\ufe0fLangGraph": "https://python.langchain.com/v0.2/docs/langgraph/", "Function calling": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/.ipynb_checkpoints/function_calling-checkpoint/"}, "TavilySearchResults": {"\ud83e\udd9c\ud83d\udd78\ufe0fLangGraph": "https://python.langchain.com/v0.2/docs/langgraph/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/integrations/tools/tavily_search/", "zhipuai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/zhipuai/", "Cohere": "https://python.langchain.com/v0.2/docs/integrations/providers/cohere/", "Quickstart": "https://python.langchain.com/v0.2/docs/get_started/quickstart/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/chatbots/tool_usage/", "Quickstart {#quickstart}": "https://python.langchain.com/v0.2/docs/modules/agents/quick_start/", "Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/agent_types/openai_tools/", "Construct the Tools agent": "https://python.langchain.com/v0.2/docs/modules/agents/agent_types/tool_calling/"}, "format_tool_to_openai_function": {"\ud83e\udd9c\ud83d\udd78\ufe0fLangGraph": "https://python.langchain.com/v0.2/docs/langgraph/"}, "BaseMessage": {"\ud83e\udd9c\ud83d\udd78\ufe0fLangGraph": "https://python.langchain.com/v0.2/docs/langgraph/", "Merge consecutive messages from the same sender into a single message": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/discord/", "Quickstart": "https://python.langchain.com/v0.2/docs/get_started/quickstart/", "Chat Bot Feedback Template": "https://python.langchain.com/v0.2/docs/templates/chat-bot-feedback/", "%pip install -qU langchain-core langchain-openai": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/few_shot/", "%pip install -qU langchain langchain-openai": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/techniques/decomposition/", "Define a custom prompt to provide instructions and any additional context.": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/examples/", "Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/streaming/", "custom_chat_model.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/custom_chat_model/"}, "FunctionMessage": {"\ud83e\udd9c\ud83d\udd78\ufe0fLangGraph": "https://python.langchain.com/v0.2/docs/langgraph/", "custom_chat_model.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/custom_chat_model/"}, "AgentAction": {"\ud83e\udd9c\ud83d\udd78\ufe0fLangGraph": "https://python.langchain.com/v0.2/docs/langgraph/", "custom.md": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/trajectory/custom/", "First, define custom callback handler implementations": "https://python.langchain.com/v0.2/docs/modules/callbacks/multiple_callbacks/"}, "AgentFinish": {"\ud83e\udd9c\ud83d\udd78\ufe0fLangGraph": 
"https://python.langchain.com/v0.2/docs/langgraph/", "openai_assistants.md": "https://python.langchain.com/v0.2/docs/modules/agents/agent_types/openai_assistants/", "Load in document to retrieve over": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/agent_structured/"}, "create_openai_functions_agent": {"\ud83e\udd9c\ud83d\udd78\ufe0fLangGraph": "https://python.langchain.com/v0.2/docs/langgraph/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/integrations/toolkits/polygon/", "How to use it inside an Agent {#how-to-use-it-inside-an-agent}": "https://python.langchain.com/v0.2/docs/integrations/tools/infobip/", "start by installing semanticscholar api": "https://python.langchain.com/v0.2/docs/integrations/tools/semanticscholar/", "Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/agent_types/openai_functions_agent/", "For use in Chaining section": "https://python.langchain.com/v0.2/docs/integrations/tools/you/", "Define the neural network": "https://python.langchain.com/v0.2/docs/integrations/toolkits/python/", "Authorize connection to your Browser extention": "https://python.langchain.com/v0.2/docs/integrations/toolkits/multion/", "Quickstart": "https://python.langchain.com/v0.2/docs/get_started/quickstart/", "pip install wikipedia": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/intermediate_steps/", "need to use GPT-4 here as GPT-3.5 does not understand, however hard you insist, that": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/agent_iter/"}, "tracing_v2_enabled": {"Used by the agent in this tutorial": "https://python.langchain.com/v0.2/docs/langsmith/walkthrough/", "Set your environment variables using os.environ": "https://python.langchain.com/v0.2/docs/integrations/toolkits/github/", "Chat Bot Feedback Template": "https://python.langchain.com/v0.2/docs/templates/chat-bot-feedback/"}, "AgentExecutor": {"Used by the agent in this tutorial": "https://python.langchain.com/v0.2/docs/langsmith/walkthrough/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/agents/", "How to use it inside an Agent {#how-to-use-it-inside-an-agent}": "https://python.langchain.com/v0.2/docs/integrations/tools/infobip/", "start by installing semanticscholar api": "https://python.langchain.com/v0.2/docs/integrations/tools/semanticscholar/", "Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/max_iterations/", "memorize.md": "https://python.langchain.com/v0.2/docs/integrations/tools/memorize/", "arxiv.md": "https://python.langchain.com/v0.2/docs/integrations/tools/arxiv/", "and some deps for this notebook": "https://python.langchain.com/v0.2/docs/integrations/tools/exa_search/", "Adapted code from /docs/modules/agents/how_to/sharedmemory_for_tools": "https://python.langchain.com/v0.2/docs/integrations/tools/reddit_search/", "For use in Chaining section": "https://python.langchain.com/v0.2/docs/integrations/tools/you/", "Based on ReAct Agent": "https://python.langchain.com/v0.2/docs/integrations/tools/ionic_shopping/", "setup tools": "https://python.langchain.com/v0.2/docs/integrations/chat/mlx/", "zhipuai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/zhipuai/", "LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/callbacks/llmonitor/", "Streamlit": "https://python.langchain.com/v0.2/docs/integrations/callbacks/streamlit/", 
"Cohere": "https://python.langchain.com/v0.2/docs/integrations/providers/cohere/", "Dataherald": "https://python.langchain.com/v0.2/docs/integrations/providers/.ipynb_checkpoints/dataherald-checkpoint/", "Construct the OpenAI Tools agent": "https://python.langchain.com/v0.2/docs/integrations/providers/portkey/logging_tracing_portkey/", "Portkey": "https://python.langchain.com/v0.2/docs/integrations/providers/portkey/index/", "Define the neural network": "https://python.langchain.com/v0.2/docs/integrations/toolkits/python/", "sql_database.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/sql_database/", "Set environmental variables here": "https://python.langchain.com/v0.2/docs/integrations/toolkits/amadeus/", "Install package": "https://python.langchain.com/v0.2/docs/integrations/toolkits/robocorp/", "Authorize connection to your Browser extention": "https://python.langchain.com/v0.2/docs/integrations/toolkits/multion/", "azure_ai_services.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/azure_ai_services/", "System parameter in NIBittensorLLM is optional but you can set whatever you want to perform with model": "https://python.langchain.com/v0.2/docs/integrations/llms/bittensor/", "Quickstart": "https://python.langchain.com/v0.2/docs/get_started/quickstart/", "conversational_retrieval_agents.md": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/conversational_retrieval_agents/", "Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/graph/semantic/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/chatbots/tool_usage/", "agent_with_memory_in_db.md": "https://python.langchain.com/v0.2/docs/modules/memory/agent_with_memory_in_db/", "agent_with_memory.md": "https://python.langchain.com/v0.2/docs/modules/memory/agent_with_memory/", "Quickstart {#quickstart}": "https://python.langchain.com/v0.2/docs/modules/agents/quick_start/", "Construct the Tools agent": "https://python.langchain.com/v0.2/docs/modules/agents/agent_types/tool_calling/", "openai_assistants.md": "https://python.langchain.com/v0.2/docs/modules/agents/agent_types/openai_assistants/", "Load in document to retrieve over": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/agent_structured/", "pip install wikipedia": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/intermediate_steps/", "need to use GPT-4 here as GPT-3.5 does not understand, however hard you insist, that": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/agent_iter/", "custom_agent.md": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/custom_agent/", "!pip install -qU langchain-openai": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/token_usage_tracking/", "prompt_size.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/prompt_size/"}, "format_to_openai_tool_messages": {"Used by the agent in this tutorial": "https://python.langchain.com/v0.2/docs/langsmith/walkthrough/", "custom_agent.md": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/custom_agent/"}, "OpenAIToolsAgentOutputParser": {"Used by the agent in this tutorial": "https://python.langchain.com/v0.2/docs/langsmith/walkthrough/", "custom_agent.md": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/custom_agent/"}, "DuckDuckGoSearchResults": {"Used by the agent in this tutorial": 
"https://python.langchain.com/v0.2/docs/langsmith/walkthrough/", "ddg.md": "https://python.langchain.com/v0.2/docs/integrations/tools/ddg/"}, "AgentType": {"Used by the agent in this tutorial": "https://python.langchain.com/v0.2/docs/langsmith/walkthrough/", "chatgpt_plugins.md": "https://python.langchain.com/v0.2/docs/integrations/tools/chatgpt_plugins/", "Specify your Connery Runner credentials.": "https://python.langchain.com/v0.2/docs/integrations/toolkits/connery/", "use it with langchain {#use-it-with-langchain}": "https://python.langchain.com/v0.2/docs/integrations/tools/google_jobs/", "google_serper.md": "https://python.langchain.com/v0.2/docs/integrations/tools/google_serper/", "Artifacts are charts created by matplotlib when `plt.show()` is called": "https://python.langchain.com/v0.2/docs/integrations/tools/e2b_data_analysis/", "Answer with 'Zhu'": "https://python.langchain.com/v0.2/docs/integrations/tools/human_tools/", "How YahooFinanceNewsTool works? {#how-yahoofinancenewstool-works}": "https://python.langchain.com/v0.2/docs/integrations/tools/yahoo_finance_news/", "google_finance.md": "https://python.langchain.com/v0.2/docs/integrations/tools/google_finance/", "awslambda.md": "https://python.langchain.com/v0.2/docs/integrations/tools/awslambda/", "folder_id='1yucgL9WGgWZdM1TOuKkeghlPizuzMYb5'": "https://python.langchain.com/v0.2/docs/integrations/tools/google_drive/", "openweathermap.md": "https://python.langchain.com/v0.2/docs/integrations/tools/openweathermap/", "memorize.md": "https://python.langchain.com/v0.2/docs/integrations/tools/memorize/", "search_tools.md": "https://python.langchain.com/v0.2/docs/integrations/tools/search_tools/", "eleven_labs_tts.md": "https://python.langchain.com/v0.2/docs/integrations/tools/eleven_labs_tts/", "Extract pdf content": "https://python.langchain.com/v0.2/docs/integrations/tools/bearly/", "get from https://platform.openai.com/": "https://python.langchain.com/v0.2/docs/integrations/tools/zapier/", "graphql.md": "https://python.langchain.com/v0.2/docs/integrations/tools/graphql/", "searchapi.md": "https://python.langchain.com/v0.2/docs/integrations/tools/searchapi/", "edenai_tools.md": "https://python.langchain.com/v0.2/docs/integrations/tools/edenai_tools/", "bash.md": "https://python.langchain.com/v0.2/docs/integrations/tools/bash/", "Set this to your Zep server URL": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory/", "xata_chat_message_history.md": "https://python.langchain.com/v0.2/docs/integrations/memory/xata_chat_message_history/", "LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/callbacks/llmonitor/", "Connect to Comet if no API Key is set": "https://python.langchain.com/v0.2/docs/integrations/callbacks/comet_tracing/", "argilla.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/argilla/", "Streamlit": "https://python.langchain.com/v0.2/docs/integrations/callbacks/.ipynb_checkpoints/streamlit-checkpoint/", "SearchApi": "https://python.langchain.com/v0.2/docs/integrations/providers/searchapi/", "scenario 1 - LLM": "https://python.langchain.com/v0.2/docs/integrations/providers/aim_tracking/", "os.environ[\"OPENAI_API_KEY\"] = \"\"": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracking/", "SCENARIO 1 - LLM": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_tracking/", "Serper - Google Search API": "https://python.langchain.com/v0.2/docs/integrations/providers/google_serper/", "Flyte": 
"https://python.langchain.com/v0.2/docs/integrations/providers/flyte/", "wandb documentation to configure wandb using env variables": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracing/", "Setup and use the ClearML Callback": "https://python.langchain.com/v0.2/docs/integrations/providers/clearml_tracking/", "Create a dataframe": "https://python.langchain.com/v0.2/docs/integrations/toolkits/csv/", "jira.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/jira/", "document_comparison_toolkit.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/document_comparison_toolkit/", "For Windows/Linux": "https://python.langchain.com/v0.2/docs/integrations/toolkits/azure_cognitive_services/", "Select the LLM to use. Here, we use gpt-3.5-turbo-instruct": "https://python.langchain.com/v0.2/docs/integrations/toolkits/openapi_nla/", "steam.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/steam/", "airbyte_structured_qa.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/airbyte_structured_qa/", "Set your environment variables using os.environ": "https://python.langchain.com/v0.2/docs/integrations/toolkits/gitlab/", "Copilot Sandbox": "https://python.langchain.com/v0.2/docs/integrations/toolkits/clickup/", "IMPORTANT: If you plan to use this account in the future, make sure to save the": "https://python.langchain.com/v0.2/docs/integrations/toolkits/ainetwork/", "If this is your first time using playwright, you'll have to install a browser executable.": "https://python.langchain.com/v0.2/docs/integrations/toolkits/playwright/", "Set environmental variables here": "https://python.langchain.com/v0.2/docs/integrations/toolkits/office365/", "pandas.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/pandas/", "nasa.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/nasa/", "These are sample parameters for Falcon 40B Instruct Deployed from Amazon SageMaker JumpStart": "https://python.langchain.com/v0.2/docs/integrations/llms/amazon_api_gateway/", "Debugging": "https://python.langchain.com/v0.2/docs/guides/development/debugging/", "Using https://huggingface.co/laiyer/deberta-v3-base-prompt-injection": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/hugging_face_prompt_injection/", "Initialize the language model": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/examples/comparisons/", "ANTHROPIC_API_KEY=": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/trajectory/trajectory_eval/", "First, define custom callback handler implementations": "https://python.langchain.com/v0.2/docs/modules/callbacks/multiple_callbacks/", "need to use GPT-4 here as GPT-3.5 does not understand, however hard you insist, that": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/agent_iter/", "token_usage_tracking.md": "https://python.langchain.com/v0.2/docs/modules/model_io/llms/token_usage_tracking/"}, "initialize_agent": {"Used by the agent in this tutorial": "https://python.langchain.com/v0.2/docs/langsmith/walkthrough/", "chatgpt_plugins.md": "https://python.langchain.com/v0.2/docs/integrations/tools/chatgpt_plugins/", "Specify your Connery Runner credentials.": "https://python.langchain.com/v0.2/docs/integrations/toolkits/connery/", "use it with langchain {#use-it-with-langchain}": "https://python.langchain.com/v0.2/docs/integrations/tools/google_jobs/", "google_serper.md": 
"https://python.langchain.com/v0.2/docs/integrations/tools/google_serper/", "Artifacts are charts created by matplotlib when `plt.show()` is called": "https://python.langchain.com/v0.2/docs/integrations/tools/e2b_data_analysis/", "Answer with 'Zhu'": "https://python.langchain.com/v0.2/docs/integrations/tools/human_tools/", "How YahooFinanceNewsTool works? {#how-yahoofinancenewstool-works}": "https://python.langchain.com/v0.2/docs/integrations/tools/yahoo_finance_news/", "google_finance.md": "https://python.langchain.com/v0.2/docs/integrations/tools/google_finance/", "awslambda.md": "https://python.langchain.com/v0.2/docs/integrations/tools/awslambda/", "folder_id='1yucgL9WGgWZdM1TOuKkeghlPizuzMYb5'": "https://python.langchain.com/v0.2/docs/integrations/tools/google_drive/", "openweathermap.md": "https://python.langchain.com/v0.2/docs/integrations/tools/openweathermap/", "memorize.md": "https://python.langchain.com/v0.2/docs/integrations/tools/memorize/", "search_tools.md": "https://python.langchain.com/v0.2/docs/integrations/tools/search_tools/", "eleven_labs_tts.md": "https://python.langchain.com/v0.2/docs/integrations/tools/eleven_labs_tts/", "Extract pdf content": "https://python.langchain.com/v0.2/docs/integrations/tools/bearly/", "get from https://platform.openai.com/": "https://python.langchain.com/v0.2/docs/integrations/tools/zapier/", "graphql.md": "https://python.langchain.com/v0.2/docs/integrations/tools/graphql/", "searchapi.md": "https://python.langchain.com/v0.2/docs/integrations/tools/searchapi/", "gradio_tools.md": "https://python.langchain.com/v0.2/docs/integrations/tools/gradio_tools/", "sceneXplain.md": "https://python.langchain.com/v0.2/docs/integrations/tools/sceneXplain/", "edenai_tools.md": "https://python.langchain.com/v0.2/docs/integrations/tools/edenai_tools/", "Needed if you would like to display images in the notebook": "https://python.langchain.com/v0.2/docs/integrations/tools/dalle_image_generator/", "bash.md": "https://python.langchain.com/v0.2/docs/integrations/tools/bash/", "Set this to your Zep server URL": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory/", "xata_chat_message_history.md": "https://python.langchain.com/v0.2/docs/integrations/memory/xata_chat_message_history/", "LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/callbacks/llmonitor/", "LLM Hyperparameters": "https://python.langchain.com/v0.2/docs/integrations/callbacks/sagemaker_tracking/", "Connect to Comet if no API Key is set": "https://python.langchain.com/v0.2/docs/integrations/callbacks/comet_tracing/", "argilla.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/argilla/", "Streamlit": "https://python.langchain.com/v0.2/docs/integrations/callbacks/.ipynb_checkpoints/streamlit-checkpoint/", "SearchApi": "https://python.langchain.com/v0.2/docs/integrations/providers/searchapi/", "os.environ[\"OPENAI_ORGANIZATION\"] = \"...\"": "https://python.langchain.com/v0.2/docs/integrations/providers/comet_tracking/", "scenario 1 - LLM": "https://python.langchain.com/v0.2/docs/integrations/providers/aim_tracking/", "os.environ[\"OPENAI_API_KEY\"] = \"\"": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracking/", "SCENARIO 1 - LLM": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_tracking/", "Serper - Google Search API": "https://python.langchain.com/v0.2/docs/integrations/providers/google_serper/", "Flyte": "https://python.langchain.com/v0.2/docs/integrations/providers/flyte/", "wandb documentation to 
configure wandb using env variables": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracing/", "Setup and use the ClearML Callback": "https://python.langchain.com/v0.2/docs/integrations/providers/clearml_tracking/", "jira.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/jira/", "document_comparison_toolkit.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/document_comparison_toolkit/", "For Windows/Linux": "https://python.langchain.com/v0.2/docs/integrations/toolkits/azure_cognitive_services/", "Select the LLM to use. Here, we use gpt-3.5-turbo-instruct": "https://python.langchain.com/v0.2/docs/integrations/toolkits/openapi_nla/", "steam.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/steam/", "Set your environment variables using os.environ": "https://python.langchain.com/v0.2/docs/integrations/toolkits/gitlab/", "Copilot Sandbox": "https://python.langchain.com/v0.2/docs/integrations/toolkits/clickup/", "IMPORTANT: If you plan to use this account in the future, make sure to save the": "https://python.langchain.com/v0.2/docs/integrations/toolkits/ainetwork/", "If this is your first time using playwright, you'll have to install a browser executable.": "https://python.langchain.com/v0.2/docs/integrations/toolkits/playwright/", "Set environmental variables here": "https://python.langchain.com/v0.2/docs/integrations/toolkits/office365/", "nasa.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/nasa/", "These are sample parameters for Falcon 40B Instruct Deployed from Amazon SageMaker JumpStart": "https://python.langchain.com/v0.2/docs/integrations/llms/amazon_api_gateway/", "Debugging": "https://python.langchain.com/v0.2/docs/guides/development/debugging/", "Using https://huggingface.co/laiyer/deberta-v3-base-prompt-injection": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/hugging_face_prompt_injection/", "Initialize the language model": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/examples/comparisons/", "ANTHROPIC_API_KEY=": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/trajectory/trajectory_eval/", "First, define custom callback handler implementations": "https://python.langchain.com/v0.2/docs/modules/callbacks/multiple_callbacks/", "need to use GPT-4 here as GPT-3.5 does not understand, however hard you insist, that": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/agent_iter/", "token_usage_tracking.md": "https://python.langchain.com/v0.2/docs/modules/model_io/llms/token_usage_tracking/"}, "load_tools": {"Used by the agent in this tutorial": "https://python.langchain.com/v0.2/docs/langsmith/walkthrough/", "Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/.ipynb_checkpoints/google-checkpoint/", "chatgpt_plugins.md": "https://python.langchain.com/v0.2/docs/integrations/tools/chatgpt_plugins/", "use it with langchain {#use-it-with-langchain}": "https://python.langchain.com/v0.2/docs/integrations/tools/google_jobs/", "Answer with 'Zhu'": "https://python.langchain.com/v0.2/docs/integrations/tools/human_tools/", "google_finance.md": "https://python.langchain.com/v0.2/docs/integrations/tools/google_finance/", "awslambda.md": "https://python.langchain.com/v0.2/docs/integrations/tools/awslambda/", "folder_id='1yucgL9WGgWZdM1TOuKkeghlPizuzMYb5'": "https://python.langchain.com/v0.2/docs/integrations/tools/google_drive/", "Each tool wrapps a requests wrapper": 
"https://python.langchain.com/v0.2/docs/integrations/tools/requests/", "openweathermap.md": "https://python.langchain.com/v0.2/docs/integrations/tools/openweathermap/", "memorize.md": "https://python.langchain.com/v0.2/docs/integrations/tools/memorize/", "search_tools.md": "https://python.langchain.com/v0.2/docs/integrations/tools/search_tools/", "eleven_labs_tts.md": "https://python.langchain.com/v0.2/docs/integrations/tools/eleven_labs_tts/", "arxiv.md": "https://python.langchain.com/v0.2/docs/integrations/tools/arxiv/", "graphql.md": "https://python.langchain.com/v0.2/docs/integrations/tools/graphql/", "sceneXplain.md": "https://python.langchain.com/v0.2/docs/integrations/tools/sceneXplain/", "Needed if you would like to display images in the notebook": "https://python.langchain.com/v0.2/docs/integrations/tools/dalle_image_generator/", "setup tools": "https://python.langchain.com/v0.2/docs/integrations/chat/mlx/", "LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/callbacks/llmonitor/", "LLM Hyperparameters": "https://python.langchain.com/v0.2/docs/integrations/callbacks/sagemaker_tracking/", "Connect to Comet if no API Key is set": "https://python.langchain.com/v0.2/docs/integrations/callbacks/comet_tracing/", "argilla.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/argilla/", "Streamlit": "https://python.langchain.com/v0.2/docs/integrations/callbacks/.ipynb_checkpoints/streamlit-checkpoint/", "SerpAPI": "https://python.langchain.com/v0.2/docs/integrations/providers/serpapi/", "SearchApi": "https://python.langchain.com/v0.2/docs/integrations/providers/searchapi/", "os.environ[\"OPENAI_ORGANIZATION\"] = \"...\"": "https://python.langchain.com/v0.2/docs/integrations/providers/comet_tracking/", "scenario 1 - LLM": "https://python.langchain.com/v0.2/docs/integrations/providers/aim_tracking/", "Golden": "https://python.langchain.com/v0.2/docs/integrations/providers/golden/", "os.environ[\"OPENAI_API_KEY\"] = \"\"": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracking/", "Wolfram Alpha": "https://python.langchain.com/v0.2/docs/integrations/providers/wolfram_alpha/", "SCENARIO 1 - LLM": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_tracking/", "DataForSEO": "https://python.langchain.com/v0.2/docs/integrations/providers/dataforseo/", "SearxNG Search API": "https://python.langchain.com/v0.2/docs/integrations/providers/searx/", "Serper - Google Search API": "https://python.langchain.com/v0.2/docs/integrations/providers/google_serper/", "OpenWeatherMap": "https://python.langchain.com/v0.2/docs/integrations/providers/openweathermap/", "Stack Exchange": "https://python.langchain.com/v0.2/docs/integrations/providers/stackexchange/", "Flyte": "https://python.langchain.com/v0.2/docs/integrations/providers/flyte/", "wandb documentation to configure wandb using env variables": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracing/", "Setup and use the ClearML Callback": "https://python.langchain.com/v0.2/docs/integrations/providers/clearml_tracking/", "Dataherald": "https://python.langchain.com/v0.2/docs/integrations/providers/.ipynb_checkpoints/dataherald-checkpoint/", "These are sample parameters for Falcon 40B Instruct Deployed from Amazon SageMaker JumpStart": "https://python.langchain.com/v0.2/docs/integrations/llms/amazon_api_gateway/", "Debugging": "https://python.langchain.com/v0.2/docs/guides/development/debugging/", "First, define custom callback handler implementations": 
"https://python.langchain.com/v0.2/docs/modules/callbacks/multiple_callbacks/", "!pip install -qU langchain-openai": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/token_usage_tracking/", "token_usage_tracking.md": "https://python.langchain.com/v0.2/docs/modules/model_io/llms/token_usage_tracking/", "prompt_size.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/prompt_size/"}, "EvaluatorType": {"Used by the agent in this tutorial": "https://python.langchain.com/v0.2/docs/langsmith/walkthrough/", "This is equivalent to loading using the enum": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/string/criteria_eval_chain/"}, "RunEvalConfig": {"Used by the agent in this tutorial": "https://python.langchain.com/v0.2/docs/langsmith/walkthrough/"}, "arun_on_dataset": {"Used by the agent in this tutorial": "https://python.langchain.com/v0.2/docs/langsmith/walkthrough/"}, "run_on_dataset": {"Used by the agent in this tutorial": "https://python.langchain.com/v0.2/docs/langsmith/walkthrough/"}, "BaseChatModel": {"Contribute Integrations": "https://python.langchain.com/v0.2/docs/contributing/integrations/", "custom_chat_model.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/custom_chat_model/"}, "deprecated": {"Contribute Integrations": "https://python.langchain.com/v0.2/docs/contributing/integrations/"}, "ChatSession": {"Merge consecutive messages from the same sender into a single message": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/discord/", "This uses some example data": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/imessage/"}, "map_ai_messages": {"Merge consecutive messages from the same sender into a single message": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/discord/", "This uses some example data": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/imessage/", "The file token.json stores the user's access and refresh tokens, and is": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/gmail/"}, "merge_chat_runs": {"Merge consecutive messages from the same sender into a single message": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/discord/", "This uses some example data": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/imessage/"}, "FolderFacebookMessengerChatLoader": {"This uses some example data": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/facebook/", "Facebook - Meta": "https://python.langchain.com/v0.2/docs/integrations/providers/facebook/"}, "SingleFileFacebookMessengerChatLoader": {"This uses some example data": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/facebook/", "Facebook - Meta": "https://python.langchain.com/v0.2/docs/integrations/providers/facebook/"}, "convert_messages_for_finetuning": {"This uses some example data": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/imessage/", "Wait for the fine-tuning to complete (this may take some time)": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/langsmith_dataset/"}, "StrOutputParser": {"This uses some example data": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/imessage/", "del os.environ['NVIDIA_API_KEY'] ## delete key and reset": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/nvidia_ai_endpoints/", "For use in Chaining section": 
"https://python.langchain.com/v0.2/docs/integrations/retrievers/you-retriever/", "fleet_context.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/fleet_context/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/integrations/retrievers/tavily/", "LangChain supports many other chat models. Here, we're using Ollama": "https://python.langchain.com/v0.2/docs/integrations/chat/ollama/", "Loading the COMVEST 2024 notice": "https://python.langchain.com/v0.2/docs/integrations/chat/maritalk/", "Or via the async API": "https://python.langchain.com/v0.2/docs/integrations/chat/nvidia_ai_endpoints/", "Fiddler project and model names, used for model registration": "https://python.langchain.com/v0.2/docs/integrations/callbacks/fiddler/", "1. Vanilla RAG {#vanilla-rag-1}": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "From LangChain, import standard modules for prompting.": "https://python.langchain.com/v0.2/docs/integrations/providers/dspy/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_summary/", "install package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/weaviate/", "cosine: distance metric": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/jaguar/", "astradb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/astradb/", "cassandra.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/cassandra/", "Install the package": "https://python.langchain.com/v0.2/docs/integrations/llms/volcengine_maas/", "use default authN method API-key": "https://python.langchain.com/v0.2/docs/integrations/llms/oci_generative_ai/", "install the opaqueprompts and langchain packages": "https://python.langchain.com/v0.2/docs/integrations/llms/opaqueprompts/", "AI21 Contextual Answer {#ai21-contextual-answer}": "https://python.langchain.com/v0.2/docs/integrations/llms/ai21/", "Quickstart": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/.ipynb_checkpoints/quickstart-checkpoint/", "Note that we set max_retries = 0 to avoid retrying on RateLimits, etc": "https://python.langchain.com/v0.2/docs/guides/productionization/fallbacks/", "QA with private data protection {#qa-with-private-data-protection}": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/qa_privacy_protection/", "This will only get documents for Ankush": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/per_user/", "import dotenv": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/streaming/", "Make sure the model path is correct for your system!": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/local_retrieval_qa/", "Uncomment if you want to log to LangSmith": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/citations/", "%pip install -qU langchain-core langchain-openai": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/techniques/step_back/", "%pip install -qU langchain langchain-openai": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/techniques/hyde/", "Uncomment the below to use LangSmith. 
Not required.": "https://python.langchain.com/v0.2/docs/use_cases/sql/query_checking/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/chatbots/quickstart/", "Retrievers": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/index/", "The vectorstore to use to index the child chunks": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/multi_vector/", "Streaming With LangChain {#streaming-with-langchain}": "https://python.langchain.com/v0.2/docs/expression_language/streaming/", "batch_configurable_chain([\"ice cream\", \"spaghetti\", \"dumplings\"])": "https://python.langchain.com/v0.2/docs/expression_language/why/", "> ChatPromptValue(messages=[HumanMessage(content='tell me a short joke about ice cream')])": "https://python.langchain.com/v0.2/docs/expression_language/get_started/", "The input schema of the chain is the input schema of its first part, the prompt.": "https://python.langchain.com/v0.2/docs/expression_language/interface/", "prompt_llm_parser.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/prompt_llm_parser/", "multiple_chains.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/multiple_chains/", "code_writing.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/code_writing/", "Dynamically route logic based on input {#dynamically-route-logic-based-on-input}": "https://python.langchain.com/v0.2/docs/expression_language/how_to/routing/", "inspect.md": "https://python.langchain.com/v0.2/docs/expression_language/how_to/inspect/", "decorator.md": "https://python.langchain.com/v0.2/docs/expression_language/how_to/decorator/", "Adding values to chain state {#adding-values-to-chain-state}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/assign/", "Binding: Attach runtime args {#binding-attach-runtime-args}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/binding/", "Formatting inputs & output {#formatting-inputs-output}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/parallel/", "Passing data through {#passing-data-through}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/passthrough/", "Run custom functions {#run-custom-functions}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/functions/", "Chaining runnables {#chaining-runnables}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/sequence/"}, "convert_message_to_dict": {"Filter out tweets that reference other tweets, because it's a bit weird": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/twitter/"}, "AIMessage": {"Filter out tweets that reference other tweets, because it's a bit weird": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/twitter/", "Set this to your Zep server URL": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory/", "open ../../../static/img/brand/wordmark.png as base64 str": "https://python.langchain.com/v0.2/docs/integrations/chat/anthropic/", "zhipuai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/zhipuai/", "yuan2.md": "https://python.langchain.com/v0.2/docs/integrations/chat/yuan2/", "sql_database.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/sql_database/", "Install required dependencies": "https://python.langchain.com/v0.2/docs/integrations/llms/chatglm/", "Quickstart": 
"https://python.langchain.com/v0.2/docs/get_started/quickstart/", "Chat Bot Feedback Template": "https://python.langchain.com/v0.2/docs/templates/chat-bot-feedback/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/tool_error_handling/", "If you'd like to use LangSmith, uncomment the below:": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/human_in_the_loop/", "%pip install -qU langchain-core langchain-openai": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/few_shot/", "%pip install -qU langchain langchain-openai": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/techniques/decomposition/", "Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/graph/semantic/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/chatbots/memory_management/", "Define a custom prompt to provide instructions and any additional context.": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/examples/", "Set up a parser": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/parse/", "Quickstart {#quickstart}": "https://python.langchain.com/v0.2/docs/modules/agents/quick_start/", "Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/agent_types/openai_tools/", "Construct the Tools agent": "https://python.langchain.com/v0.2/docs/modules/agents/agent_types/tool_calling/", "custom_agent.md": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/custom_agent/", "Tool calling {#tool-calling}": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/function_calling/", "custom_chat_model.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/custom_chat_model/", "The [bool] desribes a parameterization of a generic.": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/custom/", "composition.md": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/composition/", "Quick reference {#quick-reference}": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/quick_start/"}, "convert_pydantic_to_openai_function": {"Wait for the fine-tuning to complete (this may take some time)": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/langsmith_llm_runs/", "openai_functions.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/openai_functions/"}, "PydanticOutputFunctionsParser": {"Wait for the fine-tuning to complete (this may take some time)": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/langsmith_llm_runs/", "openai_functions.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/openai_functions/"}, "LangSmithRunChatLoader": {"Wait for the fine-tuning to complete (this may take some time)": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/langsmith_llm_runs/"}, "GMailLoader": {"The file token.json stores the user's access and refresh tokens, and is": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/gmail/", "Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/"}, "SlackChatLoader": {"Merge consecutive messages from the same sender into a single message": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/slack/", "Slack": "https://python.langchain.com/v0.2/docs/integrations/providers/slack/"}, 
"WhatsAppChatLoader": {"Merge consecutive messages from the same sender into a single message": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/whatsapp/", "Facebook - Meta": "https://python.langchain.com/v0.2/docs/integrations/providers/facebook/", "WhatsApp": "https://python.langchain.com/v0.2/docs/integrations/providers/whatsapp/", "whatsapp_chat.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/whatsapp_chat/"}, "LangSmithDatasetChatLoader": {"Wait for the fine-tuning to complete (this may take some time)": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/langsmith_dataset/"}, "IMessageChatLoader": {"This uses some example data": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/imessage/"}, "TelegramChatLoader": {"Merge consecutive messages from the same sender into a single message": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/telegram/", "Telegram": "https://python.langchain.com/v0.2/docs/integrations/providers/telegram/"}, "base": {"Merge consecutive messages from the same sender into a single message": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/discord/"}, "BookendEmbeddings": {"bookend.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/bookend/"}, "HuggingFaceBgeEmbeddings": {"bge_huggingface.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/bge_huggingface/", "Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/platforms/huggingface/", "QA with private data protection {#qa-with-private-data-protection}": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/qa_privacy_protection/"}, "QuantizedBiEncoderEmbeddings": {"optimum_intel.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/optimum_intel/", "Intel": "https://python.langchain.com/v0.2/docs/integrations/providers/intel/"}, "FireworksEmbeddings": {"Using the Embedding Model {#using-the-embedding-model}": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/fireworks/"}, "XinferenceEmbeddings": {"xinference.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/xinference/"}, "LLMRailsEmbeddings": {"llm_rails.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/llm_rails/"}, "DeepInfraEmbeddings": {"sign up for an account: https://deepinfra.com/login?utm_source=langchain": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/deepinfra/", "DeepInfra": "https://python.langchain.com/v0.2/docs/integrations/providers/deepinfra/"}, "HuggingFaceEmbeddings": {"huggingfacehub.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/huggingfacehub/", "Equivalent to SentenceTransformerEmbeddings(model_name=\"all-MiniLM-L6-v2\")": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/sentence_transformers/", "Get 3 diff embeddings.": "https://python.langchain.com/v0.2/docs/integrations/retrievers/merger_retriever/", "Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/platforms/huggingface/", "VDMS": "https://python.langchain.com/v0.2/docs/integrations/providers/vdms/", "Refresh is required for server use": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vald/", "scann.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/scann/", "default metric is angular": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/annoy/", "tiledb.md": 
"https://python.langchain.com/v0.2/docs/integrations/vectorstores/tiledb/", "%pip install --upgrade --quiet surrealdb langchain langchain-community": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/surrealdb/", "OR": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vearch/", "Pip install necessary package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vdms/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/faiss/", "Ensure that all we need is installed": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/infinispanvs/", "Create collection if running for the first time. If the collection": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/semadb/", "OR (depending on Python version)": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/cross_encoder_reranker/", "pairwise_embedding_distance.md": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/comparison/pairwise_embedding_distance/", "You can load by enum or by raw python string": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/string/embedding_distance/", "self-query-qdrant": "https://python.langchain.com/v0.2/docs/templates/self-query-qdrant/", "Get embeddings.": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/long_context_reorder/"}, "HuggingFaceInferenceAPIEmbeddings": {"huggingfacehub.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/huggingfacehub/"}, "HuggingFaceHubEmbeddings": {"huggingfacehub.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/huggingfacehub/", "text_embeddings_inference.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/text_embeddings_inference/", "Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/platforms/huggingface/"}, "GoogleGenerativeAIEmbeddings": {"google_generative_ai.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/google_generative_ai/", "Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/"}, "GPT4AllEmbeddings": {"gpt4all.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/gpt4all/", "Make sure the model path is correct for your system!": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/local_retrieval_qa/"}, "MosaicMLInstructorEmbeddings": {"sign up for an account: https://forms.mosaicml.com/demo?utm_source=langchain": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/mosaicml/"}, "QuantizedBgeEmbeddings": {"itrex.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/itrex/", "Intel": "https://python.langchain.com/v0.2/docs/integrations/providers/intel/"}, "OpenAIEmbeddings": {"openai.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/openai/", "set the environment variables needed for openai package to know to reach out to azure": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/azureopenai/", "azure_ai_search.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/azure_ai_search/", "re_phrase.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/re_phrase/", "cosine: distance metric": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/jaguar/", "fleet_context.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/fleet_context/", 
"Helper function for printing docs": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/contextual_compression/", "Establishing a connection to the database is facilitated through the singlestoredb Python connector.": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/singlestoredb/", "knn.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/knn/", "initialize the index": "https://python.langchain.com/v0.2/docs/integrations/retrievers/docarray_retriever/", "svm.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/svm/", "create the index": "https://python.langchain.com/v0.2/docs/integrations/retrievers/pinecone_hybrid_search/", "# activeloop token is needed if you are not signed in using CLI: `activeloop login -u -p `": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "OR (depending on Python version)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/flashrank-reranker/", "Get 3 diff embeddings.": "https://python.langchain.com/v0.2/docs/integrations/retrievers/merger_retriever/", "This example only specifies a relevant query": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/mongodb_atlas/", "or install latest:": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/dingo/", "Get openAI api key by reading local .env file": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/timescalevector_self_query/", "create new index": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/pinecone/", "in case if some queries fail consider installing libdeeplake manually": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/activeloop_deeplake_self_query/", "import os": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/qdrant_self_query/", "OpenAI": "https://python.langchain.com/v0.2/docs/integrations/platforms/openai/", "xata_chat_message_history.md": "https://python.langchain.com/v0.2/docs/integrations/memory/xata_chat_message_history/", "Here we want to make sure the answer is minimally relevant": "https://python.langchain.com/v0.2/docs/integrations/callbacks/confident/", "1. 
Vanilla RAG {#vanilla-rag-1}": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "ragatouille.md": "https://python.langchain.com/v0.2/docs/integrations/providers/ragatouille/", "Javelin AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/javelin_ai_gateway/", "astradb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/astradb/", "document_comparison_toolkit.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/document_comparison_toolkit/", "lancedb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/lancedb/", "load text splitter and split docs into snippets of text": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/starrocks/", "Pip install necessary package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pgembedding/", "install package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/weaviate/", "Install all needed libraries": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/yellowbrick/", "databricks_vector_search.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/databricks_vector_search/", "xata.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/xata/", "openai": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/hippo/", "connection to redis standalone at localhost, db 0, no password": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/redis/", "output length: 4": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/rockset/", "replace": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/zilliz/", "Set up the OpenAI Environment Variables": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azure_cosmos_db/", "vikingdb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vikingdb/", "Wait until the cluster is ready for use.": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/couchbase/", "typesense.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/typesense/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/momento_vector_index/", "Here we useimport getpass": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tidb_vector/", "or shorter": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/activeloop_deeplake/", "Pip install necessary package {#pip-install-necessary-package}": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/lantern/", "import": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/chroma/", "duckdb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/duckdb/", "for example": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/alibabacloud_opensearch/", "# if you plan to use bson serialization, install also:": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sklearn/", "from langchain_community.embeddings.openai import OpenAIEmbeddings": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tencentvectordb/", "Get an OpenAI token: https://platform.openai.com/account/api-keys": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/docarray_in_memory/", "use directly a `where_str` to delete": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/myscale/", "clickhouse.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/clickhouse/", 
"qdrant.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/qdrant/", "tigris.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tigris/", "ecloud_vector_search.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/ecloud_vector_search/", "with pip": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/supabase/", "If using the default Docker installation, use this instantiation instead:": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/opensearch/", "pinecone.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pinecone/", "OR": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/faiss_async/", "Option 1: use an OpenAI account": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azuresearch/", "cassandra.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/cassandra/", "usearch.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/usearch/", "This will only get documents for Ankush": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/per_user/", "Clean up KDB.AI \"documents\" table and index for similarity search": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/kdbai/", "Metadata {#metadata}": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/elasticsearch/", "Pip install necessary packages": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/timescalevector/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/faiss/", "epsilla.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/epsilla/", "DocumentDB connection string": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/documentdb/", "analyticdb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/analyticdb/", "hologres.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/hologres/", "initialize MongoDB python client": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/mongodb_atlas/", "Use Meilisearch vector store to store texts & associated embeddings as vector": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/meilisearch/", "set a flag to switch between local and remote parsing": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/youtube_audio/", "Uncomment this to install psychicapi if you don't already have it installed": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/psychic/", "You need the dgml-utils package to use the DocugamiLoader (run pip install directly without \"poetry run\" if you are not using poetry)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/docugami/", "To make the caching really obvious, lets use a slower model.": "https://python.langchain.com/v0.2/docs/integrations/llms/llm_caching/", "Quickstart": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/.ipynb_checkpoints/quickstart-checkpoint/", "QA with private data protection {#qa-with-private-data-protection}": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/qa_privacy_protection/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/chatbots/quickstart/", "Set env var OPENAI_API_KEY or load from a .env file": 
"https://python.langchain.com/v0.2/docs/use_cases/code_understanding/", "import dotenv": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/streaming/", "conversational_retrieval_agents.md": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/conversational_retrieval_agents/", "Uncomment if you want to log to LangSmith": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/citations/", "%pip install -qU langchain langchain-community langchain-openai youtube-transcript-api pytube langchain-chroma": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/quickstart/", "%pip install -qU langchain langchain-community langchain-openai langchain-chroma": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/no_queries/", "%pip install -qU langchain langchain-community langchain-openai faker langchain-chroma": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/high_cardinality/", "Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/sql/prompting/", "Download the content": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/handle_long_text/", "os.environ[\"OPENAI_API_KEY\"] = getpass.getpass()": "https://python.langchain.com/v0.2/docs/use_cases/sql/large_db/", "indexing.md": "https://python.langchain.com/v0.2/docs/modules/data_connection/indexing/", "Text embedding models": "https://python.langchain.com/v0.2/docs/modules/data_connection/text_embedding/index/", "Swapping the `ByteStore` {#swapping-the-bytestore}": "https://python.langchain.com/v0.2/docs/modules/data_connection/text_embedding/caching_embeddings/", "initialize the bm25 retriever and faiss retriever": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/ensemble/", "The vectorstore to use to index the child chunks": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/multi_vector/", "Define your embedding model": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/time_weighted_vectorstore/", "Build a sample vectorDB": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/MultiQueryRetriever/", "This text splitter is used to create the child documents": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/parent_document_retriever/", "vectorstore.md": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/vectorstore/", "This example only specifies a filter": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/self_query/", "Retrievers": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/.ipynb_checkpoints/index-checkpoint/", "Self-querying": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/.ipynb_checkpoints/self_query-checkpoint/", "Vector stores": "https://python.langchain.com/v0.2/docs/modules/data_connection/vectorstores/index/", "This is a long document we can split up.": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_transformers/semantic-chunker/", "PDF": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/pdf/", "adding_memory_chain_multiple_inputs.md": "https://python.langchain.com/v0.2/docs/modules/memory/adding_memory_chain_multiple_inputs/", "Backed by a Vector Store": "https://python.langchain.com/v0.2/docs/modules/memory/types/vectorstore_retriever_memory/", "Quickstart {#quickstart}": 
"https://python.langchain.com/v0.2/docs/modules/agents/quick_start/", "Load in document to retrieve over": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/agent_structured/", "Select the most similar example to the input.": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/few_shot_examples/", "This is a prompt template used to format each individual example.": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/few_shot_examples_chat/", "Examples of a pretend task of creating antonyms.": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/example_selectors/similarity/", "Streaming With LangChain {#streaming-with-langchain}": "https://python.langchain.com/v0.2/docs/expression_language/streaming/", "> ChatPromptValue(messages=[HumanMessage(content='tell me a short joke about ice cream')])": "https://python.langchain.com/v0.2/docs/expression_language/get_started/", "The input schema of the chain is the input schema of its first part, the prompt.": "https://python.langchain.com/v0.2/docs/expression_language/interface/", "Dynamically route logic based on input {#dynamically-route-logic-based-on-input}": "https://python.langchain.com/v0.2/docs/expression_language/how_to/routing/", "inspect.md": "https://python.langchain.com/v0.2/docs/expression_language/how_to/inspect/", "Adding values to chain state {#adding-values-to-chain-state}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/assign/", "Formatting inputs & output {#formatting-inputs-output}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/parallel/", "Passing data through {#passing-data-through}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/passthrough/"}, "VertexAIEmbeddings": {"google_vertex_ai_palm.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/google_vertex_ai_palm/", "Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "# Automatically restart kernel after installs so that your environment can access the new packages": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_cloud_sql_pg/", "TODO : Set values as per your requirements": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_vertex_ai_vector_search/", "@markdown Please specify a source for demo purpose.": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_firestore/"}, "BedrockEmbeddings": {"async embed query": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/bedrock/", "AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/"}, "GigaChatEmbeddings": {"gigachat.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/gigachat/", "Salute Devices": "https://python.langchain.com/v0.2/docs/integrations/providers/salute_devices/"}, "OllamaEmbeddings": {"ollama.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/ollama/", "Ollama": "https://python.langchain.com/v0.2/docs/integrations/providers/ollama/", "Quickstart": "https://python.langchain.com/v0.2/docs/get_started/quickstart/"}, "OCIGenAIEmbeddings": {"use default authN method API-key": "https://python.langchain.com/v0.2/docs/integrations/llms/oci_generative_ai/", "Oracle Cloud Infrastructure (OCI)": "https://python.langchain.com/v0.2/docs/integrations/providers/oci/"}, "FastEmbedEmbeddings": {"fastembed.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/fastembed/"}, "LlamaCppEmbeddings": 
{"llamacpp.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/llamacpp/", "Llama.cpp": "https://python.langchain.com/v0.2/docs/integrations/providers/llamacpp/"}, "NLPCloudEmbeddings": {"nlp_cloud.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/nlp_cloud/", "NLPCloud": "https://python.langchain.com/v0.2/docs/integrations/providers/nlpcloud/"}, "LaserEmbeddings": {"Ex Instantiationz": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/laser/", "Facebook - Meta": "https://python.langchain.com/v0.2/docs/integrations/providers/facebook/"}, "OpenCLIPEmbeddings": {"Image URIs": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/open_clip/", "Establishing a connection to the database is facilitated through the singlestoredb Python connector.": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/singlestoredb/"}, "TitanTakeoffEmbed": {"Model config for the embedding model, where you can specify the following parameters:": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/titan_takeoff/"}, "MistralAIEmbeddings": {"pip install -U langchain-mistralai": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/mistralai/", "mistralai.md": "https://python.langchain.com/v0.2/docs/integrations/providers/mistralai/"}, "SpacyEmbeddings": {"spacy_embedding.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/spacy_embedding/", "spaCy": "https://python.langchain.com/v0.2/docs/integrations/providers/spacy/"}, "BaichuanTextEmbeddings": {"baichuan.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/baichuan/", "Baichuan": "https://python.langchain.com/v0.2/docs/integrations/providers/baichuan/"}, "TogetherEmbeddings": {"install package": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/together/", "together.md": "https://python.langchain.com/v0.2/docs/integrations/providers/together/"}, "HuggingFaceInstructEmbeddings": {"instruct_embeddings.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/instruct_embeddings/", "Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/platforms/huggingface/"}, "QianfanEmbeddingsEndpoint": {"baidu_qianfan_endpoint.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/baidu_qianfan_endpoint/", "ernie.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/ernie/", "Baidu": "https://python.langchain.com/v0.2/docs/integrations/providers/baidu/", "Create a bes instance and index docs.": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/baiducloud_vector_search/"}, "CohereEmbeddings": {"cohere.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/cohere/", "OR (depending on Python version)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/cohere-reranker/", "Cohere": "https://python.langchain.com/v0.2/docs/integrations/providers/cohere/", "See docker command above to launch a postgres instance with pgvector enabled.": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pgvector/", "Quickstart": "https://python.langchain.com/v0.2/docs/get_started/quickstart/", "Text embedding models": "https://python.langchain.com/v0.2/docs/modules/data_connection/text_embedding/index/"}, "EdenAiEmbeddings": {"edenai.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/edenai/", "Eden AI": "https://python.langchain.com/v0.2/docs/integrations/providers/edenai/"}, 
"JohnSnowLabsEmbeddings": {"If you have a enterprise license, you can run this to install enterprise features": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/johnsnowlabs_embedding/"}, "ErnieEmbeddings": {"ernie.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/ernie/"}, "LLMChain": {"Dependencies {#dependencies}": "https://python.langchain.com/v0.2/docs/integrations/llms/clarifai/", "re_phrase.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/re_phrase/", "memorize.md": "https://python.langchain.com/v0.2/docs/integrations/tools/memorize/", "get from https://platform.openai.com/": "https://python.langchain.com/v0.2/docs/integrations/tools/zapier/", "Adapted code from /docs/modules/agents/how_to/sharedmemory_for_tools": "https://python.langchain.com/v0.2/docs/integrations/tools/reddit_search/", "Needed if you would like to display images in the notebook": "https://python.langchain.com/v0.2/docs/integrations/tools/dalle_image_generator/", "loads previous state from Mot\u00f6rhead \ud83e\udd18": "https://python.langchain.com/v0.2/docs/integrations/memory/motorhead_memory/", "!pip3 install text-generation": "https://python.langchain.com/v0.2/docs/integrations/chat/llama2_chat/", "context.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/context/", "LLM Hyperparameters": "https://python.langchain.com/v0.2/docs/integrations/callbacks/sagemaker_tracking/", "argilla.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/argilla/", "os.environ[\"OPENAI_ORGANIZATION\"] = \"...\"": "https://python.langchain.com/v0.2/docs/integrations/providers/comet_tracking/", "scenario 1 - LLM": "https://python.langchain.com/v0.2/docs/integrations/providers/aim_tracking/", "os.environ[\"OPENAI_API_KEY\"] = \"\"": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracking/", "MLflow Deployments for LLMs": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow/", "MLflow AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_ai_gateway/", "!pip3 install rebuff openai -U": "https://python.langchain.com/v0.2/docs/integrations/providers/rebuff/", "Prediction Guard": "https://python.langchain.com/v0.2/docs/integrations/providers/predictionguard/", "SCENARIO 1 - LLM": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_tracking/", "Shale Protocol": "https://python.langchain.com/v0.2/docs/integrations/providers/shaleprotocol/", "Flyte": "https://python.langchain.com/v0.2/docs/integrations/providers/flyte/", "0: Import ray serve and request from starlette": "https://python.langchain.com/v0.2/docs/integrations/providers/ray_serve/", "Javelin AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/javelin_ai_gateway/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/llms/minimax/", "Install all needed libraries": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/yellowbrick/", "stochasticai.md": "https://python.langchain.com/v0.2/docs/integrations/llms/stochasticai/", "solar.md": "https://python.langchain.com/v0.2/docs/integrations/llms/solar/", "System parameter in NIBittensorLLM is optional but you can set whatever you want to perform with model": "https://python.langchain.com/v0.2/docs/integrations/llms/bittensor/", "Update Langchain": "https://python.langchain.com/v0.2/docs/integrations/llms/ipex_llm/", "Install the package https://docs.banana.dev/banana-docs/core-concepts/sdks/python": 
"https://python.langchain.com/v0.2/docs/integrations/llms/banana/", "alibabacloud_pai_eas_endpoint.md": "https://python.langchain.com/v0.2/docs/integrations/llms/alibabacloud_pai_eas_endpoint/", "openllm.md": "https://python.langchain.com/v0.2/docs/integrations/llms/openllm/", "octoai.md": "https://python.langchain.com/v0.2/docs/integrations/llms/octoai/", "If you get an error, probably, you need to set up the \"base_url\" parameter that can be taken from the error log.": "https://python.langchain.com/v0.2/docs/integrations/llms/writer/", "Register an account with Modal and get a new token.": "https://python.langchain.com/v0.2/docs/integrations/llms/modal/", "textgen.md": "https://python.langchain.com/v0.2/docs/integrations/llms/textgen/", "xinference.md": "https://python.langchain.com/v0.2/docs/integrations/llms/xinference/", "symblai_nebula.md": "https://python.langchain.com/v0.2/docs/integrations/llms/symblai_nebula/", "get a new token: https://deepinfra.com/login?from=%2Fdash": "https://python.langchain.com/v0.2/docs/integrations/llms/deepinfra/", "get a token: https://docs.nlpcloud.com/#authentication": "https://python.langchain.com/v0.2/docs/integrations/llms/nlpcloud/", "Callbacks support token-wise streaming": "https://python.langchain.com/v0.2/docs/integrations/llms/gpt4all/", "get a new token: https://docs.forefront.ai/forefront/api-reference/authentication": "https://python.langchain.com/v0.2/docs/integrations/llms/forefrontai/", "sign up for an account: https://forms.mosaicml.com/demo?utm_source=langchain": "https://python.langchain.com/v0.2/docs/integrations/llms/mosaicml/", "Install the package": "https://python.langchain.com/v0.2/docs/integrations/llms/pipelineai/", "get a token: https://platform.openai.com/account/api-keys": "https://python.langchain.com/v0.2/docs/integrations/llms/openai/", "gigachat.md": "https://python.langchain.com/v0.2/docs/integrations/llms/gigachat/", "use default authN method API-key": "https://python.langchain.com/v0.2/docs/integrations/llms/oci_generative_ai/", "install the opaqueprompts and langchain packages": "https://python.langchain.com/v0.2/docs/integrations/llms/opaqueprompts/", "%pip list | grep aphrodite": "https://python.langchain.com/v0.2/docs/integrations/llms/aphrodite/", "Run the chain specifying only the input variable for the first chain.": "https://python.langchain.com/v0.2/docs/integrations/llms/edenai/", "Optional, add your OpenAI API Key. 
This is optional, as Prediction Guard allows": "https://python.langchain.com/v0.2/docs/integrations/llms/predictionguard/", "Step 4: Embeddings Example {#step-4-embeddings-example}": "https://python.langchain.com/v0.2/docs/integrations/llms/javelin/", "Calling a single prompt": "https://python.langchain.com/v0.2/docs/integrations/llms/ibm_watsonx/", "ctransformers.md": "https://python.langchain.com/v0.2/docs/integrations/llms/ctransformers/", "vllm.md": "https://python.langchain.com/v0.2/docs/integrations/llms/vllm/", "azure_ml.md": "https://python.langchain.com/v0.2/docs/integrations/llms/azure_ml/", "default infer_api for a local deployed Yuan2.0 inference server": "https://python.langchain.com/v0.2/docs/integrations/llms/yuan2/", "get a token: https://huggingface.co/docs/api-inference/quicktour#get-your-api-token": "https://python.langchain.com/v0.2/docs/integrations/llms/huggingface_endpoint/", "For an on-demand A100 with GCP, Azure, or Lambda": "https://python.langchain.com/v0.2/docs/integrations/llms/runhouse/", "anyscale.md": "https://python.langchain.com/v0.2/docs/integrations/llms/anyscale/", "yandex.md": "https://python.langchain.com/v0.2/docs/integrations/llms/yandex/", "gooseai.md": "https://python.langchain.com/v0.2/docs/integrations/llms/gooseai/", "Uncomment to install openlm and openai if you haven't already": "https://python.langchain.com/v0.2/docs/integrations/llms/openlm/", "Using streaming": "https://python.langchain.com/v0.2/docs/integrations/llms/cloudflare_workersai/", "conversation can take several minutes": "https://python.langchain.com/v0.2/docs/integrations/llms/ctranslate2/", "Install required dependencies": "https://python.langchain.com/v0.2/docs/integrations/llms/chatglm/", "Improve the results by fine-tuning (optional) {#improve-the-results-by-fine-tuning-optional}": "https://python.langchain.com/v0.2/docs/integrations/llms/gradient/", "this can take several minutes to download big files!": "https://python.langchain.com/v0.2/docs/integrations/llms/petals/", "magics to auto-reload external modules in case you are making changes to langchain while working on this notebook": "https://python.langchain.com/v0.2/docs/integrations/llms/replicate/", "Download a llamafile from HuggingFace": "https://python.langchain.com/v0.2/docs/guides/development/local_llms/", "Logical Fallacy chain": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/logical_fallacy_chain/", "Constitutional chain": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/constitutional_chain/", "custom.md": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/trajectory/custom/", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/v0.2/docs/use_cases/summarization/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/apis/", "Get embeddings.": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/long_context_reorder/", "Build a sample vectorDB": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/MultiQueryRetriever/", "[Beta] Memory": "https://python.langchain.com/v0.2/docs/modules/memory/.ipynb_checkpoints/index-checkpoint/", "adding_memory.md": "https://python.langchain.com/v0.2/docs/modules/memory/adding_memory/", "Callbacks": "https://python.langchain.com/v0.2/docs/modules/callbacks/index/", "composition.md": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/composition/"}, 
"ClarifaiEmbeddings": {"Dependencies {#dependencies}": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/clarifai/", "Clarifai": "https://python.langchain.com/v0.2/docs/integrations/providers/clarifai/"}, "PromptTemplate": {"Dependencies {#dependencies}": "https://python.langchain.com/v0.2/docs/integrations/llms/clarifai/", "re_phrase.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/re_phrase/", "folder_id='1yucgL9WGgWZdM1TOuKkeghlPizuzMYb5'": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_drive/", "get from https://platform.openai.com/": "https://python.langchain.com/v0.2/docs/integrations/tools/zapier/", "send data into the chain": "https://python.langchain.com/v0.2/docs/integrations/tools/nvidia_riva/", "and some deps for this notebook": "https://python.langchain.com/v0.2/docs/integrations/tools/exa_search/", "Adapted code from /docs/modules/agents/how_to/sharedmemory_for_tools": "https://python.langchain.com/v0.2/docs/integrations/tools/reddit_search/", "Needed if you would like to display images in the notebook": "https://python.langchain.com/v0.2/docs/integrations/tools/dalle_image_generator/", "loads previous state from Mot\u00f6rhead \ud83e\udd18": "https://python.langchain.com/v0.2/docs/integrations/memory/motorhead_memory/", "context.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/context/", "LLM Hyperparameters": "https://python.langchain.com/v0.2/docs/integrations/callbacks/sagemaker_tracking/", "argilla.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/argilla/", "From LangChain, import standard modules for prompting.": "https://python.langchain.com/v0.2/docs/integrations/providers/dspy/", "os.environ[\"OPENAI_ORGANIZATION\"] = \"...\"": "https://python.langchain.com/v0.2/docs/integrations/providers/comet_tracking/", "scenario 1 - LLM": "https://python.langchain.com/v0.2/docs/integrations/providers/aim_tracking/", "os.environ[\"OPENAI_API_KEY\"] = \"\"": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracking/", "MLflow AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_ai_gateway/", "!pip3 install rebuff openai -U": "https://python.langchain.com/v0.2/docs/integrations/providers/rebuff/", "Prediction Guard": "https://python.langchain.com/v0.2/docs/integrations/providers/predictionguard/", "SCENARIO 1 - LLM": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_tracking/", "Shale Protocol": "https://python.langchain.com/v0.2/docs/integrations/providers/shaleprotocol/", "Flyte": "https://python.langchain.com/v0.2/docs/integrations/providers/flyte/", "0: Import ray serve and request from starlette": "https://python.langchain.com/v0.2/docs/integrations/providers/ray_serve/", "Javelin AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/javelin_ai_gateway/", "DocumentDB connection string": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/documentdb/", "initialize MongoDB python client": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/mongodb_atlas/", "airbyte.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte/", "Creating and executing the seeding query": "https://python.langchain.com/v0.2/docs/integrations/graphs/memgraph/", "How many people played in Top Gun?": "https://python.langchain.com/v0.2/docs/integrations/graphs/neo4j_cypher/", "Setup {#setup}": 
"https://python.langchain.com/v0.2/docs/integrations/llms/minimax/", "stochasticai.md": "https://python.langchain.com/v0.2/docs/integrations/llms/stochasticai/", "solar.md": "https://python.langchain.com/v0.2/docs/integrations/llms/solar/", "System parameter in NIBittensorLLM is optional but you can set whatever you want to perform with model": "https://python.langchain.com/v0.2/docs/integrations/llms/bittensor/", "Update Langchain": "https://python.langchain.com/v0.2/docs/integrations/llms/ipex_llm/", "Install the package https://docs.banana.dev/banana-docs/core-concepts/sdks/python": "https://python.langchain.com/v0.2/docs/integrations/llms/banana/", "alibabacloud_pai_eas_endpoint.md": "https://python.langchain.com/v0.2/docs/integrations/llms/alibabacloud_pai_eas_endpoint/", "openllm.md": "https://python.langchain.com/v0.2/docs/integrations/llms/openllm/", "sagemaker.md": "https://python.langchain.com/v0.2/docs/integrations/llms/sagemaker/", "octoai.md": "https://python.langchain.com/v0.2/docs/integrations/llms/octoai/", "If you get an error, probably, you need to set up the \"base_url\" parameter that can be taken from the error log.": "https://python.langchain.com/v0.2/docs/integrations/llms/writer/", "Register an account with Modal and get a new token.": "https://python.langchain.com/v0.2/docs/integrations/llms/modal/", "textgen.md": "https://python.langchain.com/v0.2/docs/integrations/llms/textgen/", "xinference.md": "https://python.langchain.com/v0.2/docs/integrations/llms/xinference/", "symblai_nebula.md": "https://python.langchain.com/v0.2/docs/integrations/llms/symblai_nebula/", "get a new token: https://deepinfra.com/login?from=%2Fdash": "https://python.langchain.com/v0.2/docs/integrations/llms/deepinfra/", "anthropic.md": "https://python.langchain.com/v0.2/docs/integrations/llms/anthropic/", "get a token: https://docs.nlpcloud.com/#authentication": "https://python.langchain.com/v0.2/docs/integrations/llms/nlpcloud/", "Callbacks support token-wise streaming": "https://python.langchain.com/v0.2/docs/integrations/llms/llamacpp/", "get a new token: https://docs.forefront.ai/forefront/api-reference/authentication": "https://python.langchain.com/v0.2/docs/integrations/llms/forefrontai/", "sign up for an account: https://forms.mosaicml.com/demo?utm_source=langchain": "https://python.langchain.com/v0.2/docs/integrations/llms/mosaicml/", "Install the package": "https://python.langchain.com/v0.2/docs/integrations/llms/pipelineai/", "get a token: https://platform.openai.com/account/api-keys": "https://python.langchain.com/v0.2/docs/integrations/llms/openai/", "google_vertex_ai_palm.md": "https://python.langchain.com/v0.2/docs/integrations/llms/google_vertex_ai_palm/", "gigachat.md": "https://python.langchain.com/v0.2/docs/integrations/llms/gigachat/", "use default authN method API-key": "https://python.langchain.com/v0.2/docs/integrations/llms/oci_generative_ai/", "huggingface_pipelines.md": "https://python.langchain.com/v0.2/docs/integrations/llms/huggingface_pipelines/", "install the opaqueprompts and langchain packages": "https://python.langchain.com/v0.2/docs/integrations/llms/opaqueprompts/", "Note importing TitanTakeoffPro instead of TitanTakeoff will work as well both use same object under the hood": "https://python.langchain.com/v0.2/docs/integrations/llms/titan_takeoff/", "%pip list | grep aphrodite": "https://python.langchain.com/v0.2/docs/integrations/llms/aphrodite/", "AI21 Contextual Answer {#ai21-contextual-answer}": 
"https://python.langchain.com/v0.2/docs/integrations/llms/ai21/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/integrations/llms/cohere/", "Run the chain specifying only the input variable for the first chain.": "https://python.langchain.com/v0.2/docs/integrations/llms/edenai/", "Optional, add your OpenAI API Key. This is optional, as Prediction Guard allows": "https://python.langchain.com/v0.2/docs/integrations/llms/predictionguard/", "Step 4: Embeddings Example {#step-4-embeddings-example}": "https://python.langchain.com/v0.2/docs/integrations/llms/javelin/", "Calling a single prompt": "https://python.langchain.com/v0.2/docs/integrations/llms/ibm_watsonx/", "ctransformers.md": "https://python.langchain.com/v0.2/docs/integrations/llms/ctransformers/", "vllm.md": "https://python.langchain.com/v0.2/docs/integrations/llms/vllm/", "azure_ml.md": "https://python.langchain.com/v0.2/docs/integrations/llms/azure_ml/", "Map reduce example": "https://python.langchain.com/v0.2/docs/integrations/llms/manifest/", "get a token: https://huggingface.co/docs/api-inference/quicktour#get-your-api-token": "https://python.langchain.com/v0.2/docs/integrations/llms/huggingface_endpoint/", "mlx_pipelines.md": "https://python.langchain.com/v0.2/docs/integrations/llms/mlx_pipelines/", "For an on-demand A100 with GCP, Azure, or Lambda": "https://python.langchain.com/v0.2/docs/integrations/llms/runhouse/", "anyscale.md": "https://python.langchain.com/v0.2/docs/integrations/llms/anyscale/", "yandex.md": "https://python.langchain.com/v0.2/docs/integrations/llms/yandex/", "gooseai.md": "https://python.langchain.com/v0.2/docs/integrations/llms/gooseai/", "Uncomment to install openlm and openai if you haven't already": "https://python.langchain.com/v0.2/docs/integrations/llms/openlm/", "Using streaming": "https://python.langchain.com/v0.2/docs/integrations/llms/cloudflare_workersai/", "conversation can take several minutes": "https://python.langchain.com/v0.2/docs/integrations/llms/ctranslate2/", "google_ai.md": "https://python.langchain.com/v0.2/docs/integrations/llms/google_ai/", "Install required dependencies": "https://python.langchain.com/v0.2/docs/integrations/llms/chatglm/", "Improve the results by fine-tuning (optional) {#improve-the-results-by-fine-tuning-optional}": "https://python.langchain.com/v0.2/docs/integrations/llms/gradient/", "this can take several minutes to download big files!": "https://python.langchain.com/v0.2/docs/integrations/llms/petals/", "openvino.md": "https://python.langchain.com/v0.2/docs/integrations/llms/openvino/", "weight_only_quantization.md": "https://python.langchain.com/v0.2/docs/integrations/llms/weight_only_quantization/", "magics to auto-reload external modules in case you are making changes to langchain while working on this notebook": "https://python.langchain.com/v0.2/docs/integrations/llms/replicate/", "Quickstart": "https://python.langchain.com/v0.2/docs/modules/model_io/.ipynb_checkpoints/quick_start-checkpoint/", "Download a llamafile from HuggingFace": "https://python.langchain.com/v0.2/docs/guides/development/local_llms/", "Note that we set max_retries = 0 to avoid retrying on RateLimits, etc": "https://python.langchain.com/v0.2/docs/guides/productionization/fallbacks/", "Logical Fallacy chain": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/logical_fallacy_chain/", "Define callback handlers by subclassing BaseModerationCallbackHandler": 
"https://python.langchain.com/v0.2/docs/guides/productionization/safety/amazon_comprehend_chain/", "Constitutional chain": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/constitutional_chain/", "QA with private data protection {#qa-with-private-data-protection}": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/qa_privacy_protection/", "Reversible data anonymization with Microsoft Presidio {#reversible-data-anonymization-with-microsoft-presidio}": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/reversible/", "Download model": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/index/", "The prompt was assigned to the evaluator": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/comparison/pairwise_string/", "This is equivalent to loading using the enum": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/string/criteria_eval_chain/", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/v0.2/docs/use_cases/summarization/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/data_generation/", "Make sure the model path is correct for your system!": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/local_retrieval_qa/", "Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/sql/prompting/", "Get embeddings.": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/long_context_reorder/", "Build a sample vectorDB": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/MultiQueryRetriever/", "[Beta] Memory": "https://python.langchain.com/v0.2/docs/modules/memory/.ipynb_checkpoints/index-checkpoint/", "!python -m spacy download en_core_web_lg": "https://python.langchain.com/v0.2/docs/modules/memory/custom_memory/", "adding_memory_chain_multiple_inputs.md": "https://python.langchain.com/v0.2/docs/modules/memory/adding_memory_chain_multiple_inputs/", "adding_memory.md": "https://python.langchain.com/v0.2/docs/modules/memory/adding_memory/", "Combined": "https://python.langchain.com/v0.2/docs/modules/memory/multiple_memory/", "Here it is by default set to \"AI\"": "https://python.langchain.com/v0.2/docs/modules/memory/conversational_customization/", "kg.md": "https://python.langchain.com/v0.2/docs/modules/memory/types/kg/", "Backed by a Vector Store": "https://python.langchain.com/v0.2/docs/modules/memory/types/vectorstore_retriever_memory/", "Callbacks": "https://python.langchain.com/v0.2/docs/modules/callbacks/index/", "this chain will both print to stdout (because verbose=True) and write to 'output.log'": "https://python.langchain.com/v0.2/docs/modules/callbacks/filecallbackhandler/", "Model I/O": "https://python.langchain.com/v0.2/docs/modules/model_io/index/", "Prompts": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/.ipynb_checkpoints/prompts-checkpoint/", "Define your desired data structure.": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/pydantic/", "structured.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/structured/", "csv.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/csv/", "retry.md": 
"https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/retry/", "enum.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/enum/", "datetime.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/datetime/", "Solely for documentation purposes.": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/pandas_dataframe/", "xml.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/xml/", "Select the most similar example to the input.": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/few_shot_examples/", "partial.md": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/partial/", "composition.md": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/composition/", "Quick reference {#quick-reference}": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/quick_start/", "Examples of a pretend task of creating antonyms.": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/example_selectors/similarity/", "index.md": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/example_selectors/index/", "Examples of a fictional translation task.": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/example_selectors/ngram_overlap/", "Prompt templates": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/prompt_templates/.ipynb_checkpoints/index-checkpoint/", "Dynamically route logic based on input {#dynamically-route-logic-based-on-input}": "https://python.langchain.com/v0.2/docs/expression_language/how_to/routing/", "Configure chain internals at runtime {#configure-chain-internals-at-runtime}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/configure/"}, "AzureOpenAIEmbeddings": {"set the environment variables needed for openai package to know to reach out to azure": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/azureopenai/", "Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/.ipynb_checkpoints/microsoft-checkpoint/", "Option 1: use an OpenAI account": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azuresearch/"}, "InfinityEmbeddings": {"Option 1: Use infinity from Python {#option-1-use-infinity-from-python}": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/infinity/", "Infinity": "https://python.langchain.com/v0.2/docs/integrations/providers/infinity/"}, "InfinityEmbeddingsLocal": {"Option 1: Use infinity from Python {#option-1-use-infinity-from-python}": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/infinity/"}, "AwaEmbeddings": {"pip install awadb": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/awadb/", "AwaDB": "https://python.langchain.com/v0.2/docs/integrations/providers/awadb/"}, "VolcanoEmbeddings": {"volcengine.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/volcengine/"}, "MiniMaxEmbeddings": {"minimax.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/minimax/", "Minimax": "https://python.langchain.com/v0.2/docs/integrations/providers/minimax/"}, "FakeEmbeddings": {"fake.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/fake/", "initialize the index": "https://python.langchain.com/v0.2/docs/integrations/retrievers/docarray_retriever/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vectara/", "drop first 
if index already exists": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tair/", "from langchain_community.embeddings.openai import OpenAIEmbeddings": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tencentvectordb/", "# Automatically restart kernel after installs so that your environment can access the new packages": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_memorystore_redis/", "Run tests with shell:": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pgvecto_rs/", "baiduvectordb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/baiduvectordb/"}, "NeMoEmbeddings": {"nemo.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/nemo/"}, "NomicEmbeddings": {"install package": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/nomic/", "nomic.md": "https://python.langchain.com/v0.2/docs/integrations/providers/nomic/"}, "SparkLLMTextEmbeddings": {"sparkllm.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/sparkllm/"}, "PremAIEmbeddings": {"Let's start by doing some imports and define our embedding object": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/premai/"}, "ElasticsearchEmbeddings": {"Define the model ID": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/elasticsearch/", "Elasticsearch": "https://python.langchain.com/v0.2/docs/integrations/providers/elasticsearch/"}, "VoyageAIEmbeddings": {"retrieve the most relevant documents": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/voyageai/", "VoyageAI": "https://python.langchain.com/v0.2/docs/integrations/providers/voyageai/", "OR (depending on Python version)": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/voyageai-reranker/"}, "KNNRetriever": {"retrieve the most relevant documents": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/voyageai/", "knn.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/knn/"}, "SelfHostedEmbeddings": {"For an on-demand A100 with GCP, Azure, or Lambda": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/self-hosted/"}, "SelfHostedHuggingFaceEmbeddings": {"For an on-demand A100 with GCP, Azure, or Lambda": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/self-hosted/"}, "SelfHostedHuggingFaceInstructEmbeddings": {"For an on-demand A100 with GCP, Azure, or Lambda": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/self-hosted/"}, "AnyscaleEmbeddings": {"anyscale.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/anyscale/", "Anyscale": "https://python.langchain.com/v0.2/docs/integrations/providers/anyscale/"}, "EmbaasEmbeddings": {"Set API key": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/embaas/"}, "YandexGPTEmbeddings": {"yandex.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/yandex/"}, "JinaEmbeddings": {"jina.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/jina/", "Jina": "https://python.langchain.com/v0.2/docs/integrations/providers/jina/"}, "AlephAlphaAsymmetricSemanticEmbedding": {"aleph_alpha.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/aleph_alpha/", "Aleph Alpha": "https://python.langchain.com/v0.2/docs/integrations/providers/aleph_alpha/"}, "AlephAlphaSymmetricSemanticEmbedding": {"aleph_alpha.md": 
"https://python.langchain.com/v0.2/docs/integrations/text_embedding/aleph_alpha/", "Aleph Alpha": "https://python.langchain.com/v0.2/docs/integrations/providers/aleph_alpha/"}, "CloudflareWorkersAIEmbeddings": {"single string embeddings": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/cloudflare_workersai/", "Cloudflare": "https://python.langchain.com/v0.2/docs/integrations/providers/cloudflare/"}, "DashScopeEmbeddings": {"dashscope.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/dashscope/", "create DashVector collection": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/dashvector/", "add texts": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/dashvector/"}, "TensorflowHubEmbeddings": {"tensorflowhub.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/tensorflowhub/"}, "LlamafileEmbeddings": {"llamafile setup": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/llamafile/"}, "GradientEmbeddings": {"(demo) compute similarity": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/gradient/", "Gradient": "https://python.langchain.com/v0.2/docs/integrations/providers/gradient/"}, "ModelScopeEmbeddings": {"modelscope_hub.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/modelscope_hub/", "ModelScope": "https://python.langchain.com/v0.2/docs/integrations/providers/modelscope/"}, "SagemakerEndpointEmbeddings": {"client = boto3.client(": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/sagemaker-endpoint/", "AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/"}, "EmbeddingsContentHandler": {"client = boto3.client(": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/sagemaker-endpoint/"}, "DocArrayInMemorySearch": {"async embed query": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/upstage/", "Get an OpenAI token: https://platform.openai.com/account/api-keys": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/docarray_in_memory/", "> ChatPromptValue(messages=[HumanMessage(content='tell me a short joke about ice cream')])": "https://python.langchain.com/v0.2/docs/expression_language/get_started/"}, "OpenVINOEmbeddings": {"openvino.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/openvino/", "Helper function for printing docs": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/openvino_rerank/"}, "OpenVINOBgeEmbeddings": {"openvino.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/openvino/"}, "NVIDIAEmbeddings": {"del os.environ['NVIDIA_API_KEY'] ## delete key and reset": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/nvidia_ai_endpoints/", "NVIDIA": "https://python.langchain.com/v0.2/docs/integrations/providers/nvidia/"}, "FAISS": {"del os.environ['NVIDIA_API_KEY'] ## delete key and reset": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/nvidia_ai_endpoints/", "fleet_context.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/fleet_context/", "Helper function for printing docs": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/contextual_compression/", "OR (depending on Python version)": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/cross_encoder_reranker/", "1. 
Vanilla RAG {#vanilla-rag-1}": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "ragatouille.md": "https://python.langchain.com/v0.2/docs/integrations/providers/ragatouille/", "Facebook - Meta": "https://python.langchain.com/v0.2/docs/integrations/providers/facebook/", "document_comparison_toolkit.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/document_comparison_toolkit/", "OR": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/faiss_async/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/faiss/", "set a flag to switch between local and remote parsing": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/youtube_audio/", "use default authN method API-key": "https://python.langchain.com/v0.2/docs/integrations/llms/oci_generative_ai/", "Quickstart": "https://python.langchain.com/v0.2/docs/get_started/quickstart/", "QA with private data protection {#qa-with-private-data-protection}": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/qa_privacy_protection/", "conversational_retrieval_agents.md": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/conversational_retrieval_agents/", "Download the content": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/handle_long_text/", "Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/sql/prompting/", "os.environ[\"OPENAI_API_KEY\"] = getpass.getpass()": "https://python.langchain.com/v0.2/docs/use_cases/sql/large_db/", "Swapping the `ByteStore` {#swapping-the-bytestore}": "https://python.langchain.com/v0.2/docs/modules/data_connection/text_embedding/caching_embeddings/", "initialize the bm25 retriever and faiss retriever": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/ensemble/", "Define your embedding model": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/time_weighted_vectorstore/", "vectorstore.md": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/vectorstore/", "Vector stores": "https://python.langchain.com/v0.2/docs/modules/data_connection/vectorstores/index/", "PDF": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/pdf/", "Backed by a Vector Store": "https://python.langchain.com/v0.2/docs/modules/memory/types/vectorstore_retriever_memory/", "Quickstart {#quickstart}": "https://python.langchain.com/v0.2/docs/modules/agents/quick_start/", "Examples of a pretend task of creating antonyms.": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/example_selectors/mmr/", "Streaming With LangChain {#streaming-with-langchain}": "https://python.langchain.com/v0.2/docs/expression_language/streaming/", "The input schema of the chain is the input schema of its first part, the prompt.": "https://python.langchain.com/v0.2/docs/expression_language/interface/", "inspect.md": "https://python.langchain.com/v0.2/docs/expression_language/how_to/inspect/", "Adding values to chain state {#adding-values-to-chain-state}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/assign/", "Formatting inputs & output {#formatting-inputs-output}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/parallel/", "Passing data through {#passing-data-through}": 
"https://python.langchain.com/v0.2/docs/expression_language/primitives/passthrough/"}, "RunnablePassthrough": {"del os.environ['NVIDIA_API_KEY'] ## delete key and reset": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/nvidia_ai_endpoints/", "For use in Chaining section": "https://python.langchain.com/v0.2/docs/integrations/retrievers/you-retriever/", "fleet_context.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/fleet_context/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/tool_error_handling/", "and some deps for this notebook": "https://python.langchain.com/v0.2/docs/integrations/tools/exa_search/", "1. Vanilla RAG {#vanilla-rag-1}": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "From LangChain, import standard modules for prompting.": "https://python.langchain.com/v0.2/docs/integrations/providers/dspy/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_summary/", "install package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/weaviate/", "cosine: distance metric": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/jaguar/", "astradb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/astradb/", "cassandra.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/cassandra/", "use default authN method API-key": "https://python.langchain.com/v0.2/docs/integrations/llms/oci_generative_ai/", "install the opaqueprompts and langchain packages": "https://python.langchain.com/v0.2/docs/integrations/llms/opaqueprompts/", "QA with private data protection {#qa-with-private-data-protection}": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/qa_privacy_protection/", "If you'd like to use LangSmith, uncomment the below:": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/prompting/", "This will only get documents for Ankush": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/per_user/", "import dotenv": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/streaming/", "Make sure the model path is correct for your system!": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/local_retrieval_qa/", "Uncomment if you want to log to LangSmith": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/citations/", "Quickstart": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/.ipynb_checkpoints/quickstart-checkpoint/", "%pip install -qU langchain langchain-community langchain-openai youtube-transcript-api pytube langchain-chroma": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/quickstart/", "%pip install -qU langchain langchain-community langchain-openai langchain-chroma": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/no_queries/", "%pip install -qU langchain-core langchain-openai": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/techniques/step_back/", "%pip install -qU langchain langchain-community langchain-openai faker langchain-chroma": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/high_cardinality/", "%pip install -qU langchain langchain-openai": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/techniques/hyde/", "Uncomment the below to use LangSmith. 
Not required.": "https://python.langchain.com/v0.2/docs/use_cases/sql/quickstart/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/chatbots/memory_management/", "os.environ[\"OPENAI_API_KEY\"] = getpass.getpass()": "https://python.langchain.com/v0.2/docs/use_cases/sql/large_db/", "Retrievers": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/index/", "Tool calling {#tool-calling}": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/function_calling/", "Streaming With LangChain {#streaming-with-langchain}": "https://python.langchain.com/v0.2/docs/expression_language/streaming/", "batch_configurable_chain([\"ice cream\", \"spaghetti\", \"dumplings\"])": "https://python.langchain.com/v0.2/docs/expression_language/why/", "> ChatPromptValue(messages=[HumanMessage(content='tell me a short joke about ice cream')])": "https://python.langchain.com/v0.2/docs/expression_language/get_started/", "The input schema of the chain is the input schema of its first part, the prompt.": "https://python.langchain.com/v0.2/docs/expression_language/interface/", "prompt_llm_parser.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/prompt_llm_parser/", "multiple_chains.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/multiple_chains/", "Dynamically route logic based on input {#dynamically-route-logic-based-on-input}": "https://python.langchain.com/v0.2/docs/expression_language/how_to/routing/", "inspect.md": "https://python.langchain.com/v0.2/docs/expression_language/how_to/inspect/", "Adding values to chain state {#adding-values-to-chain-state}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/assign/", "Binding: Attach runtime args {#binding-attach-runtime-args}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/binding/", "Formatting inputs & output {#formatting-inputs-output}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/parallel/", "Passing data through {#passing-data-through}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/passthrough/"}, "ChatNVIDIA": {"del os.environ['NVIDIA_API_KEY'] ## delete key and reset": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/nvidia_ai_endpoints/", "Or via the async API": "https://python.langchain.com/v0.2/docs/integrations/chat/nvidia_ai_endpoints/", "NVIDIA": "https://python.langchain.com/v0.2/docs/integrations/providers/nvidia/"}, "LocalAIEmbeddings": {"if you are behind an explicit proxy, you can use the OPENAI_PROXY environment variable to pass through": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/localai/"}, "AzureAISearchRetriever": {"azure_ai_search.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/azure_ai_search/", "Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/"}, "DirectoryLoader": {"azure_ai_search.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/azure_ai_search/", "load text splitter and split docs into snippets of text": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/starrocks/", "File Directory": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/file_directory/"}, "TextLoader": {"azure_ai_search.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/azure_ai_search/", "cosine: distance metric": 
"https://python.langchain.com/v0.2/docs/integrations/vectorstores/jaguar/", "Helper function for printing docs": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/contextual_compression/", "OR (depending on Python version)": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/cross_encoder_reranker/", "Establishing a connection to the database is facilitated through the singlestoredb Python connector.": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/singlestoredb/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/momento_vector_index/", "Here we want to make sure the answer is minimally relevant": "https://python.langchain.com/v0.2/docs/integrations/callbacks/confident/", "1. Vanilla RAG {#vanilla-rag-1}": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "VDMS": "https://python.langchain.com/v0.2/docs/integrations/providers/vdms/", "lancedb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/lancedb/", "Pip install necessary package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vdms/", "You need to install sqlite-vss as a dependency.": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sqlitevss/", "Refresh is required for server use": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vald/", "install package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/weaviate/", "add texts": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/dashvector/", "databricks_vector_search.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/databricks_vector_search/", "scann.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/scann/", "xata.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/xata/", "openai": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/hippo/", "docs[0].metadata[\"id\"] == \"id:testapp:testapp::32\"": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vespa/", "output length: 4": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/rockset/", "or install latest:": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/dingo/", "replace": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/zilliz/", "Set up the OpenAI Environment Variables": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azure_cosmos_db/", "vikingdb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vikingdb/", "default metric is angular": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/annoy/", "Wait until the cluster is ready for use.": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/couchbase/", "typesense.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/typesense/", "Here we useimport getpass": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tidb_vector/", "atlas.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/atlas/", "or shorter": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/activeloop_deeplake/", "Load the document and split it into chunks": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vlite/", "Pip install necessary package {#pip-install-necessary-package}": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/lantern/", "drop first if index already exists": 
"https://python.langchain.com/v0.2/docs/integrations/vectorstores/tair/", "import": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/chroma/", "duckdb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/duckdb/", "for example": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/alibabacloud_opensearch/", "Dependencies {#dependencies}": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/clarifai/", "# if you plan to use bson serialization, install also:": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sklearn/", "from langchain_community.embeddings.openai import OpenAIEmbeddings": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tencentvectordb/", "Get an OpenAI token: https://platform.openai.com/account/api-keys": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/docarray_in_memory/", "use directly a `where_str` to delete": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/myscale/", "tiledb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tiledb/", "clickhouse.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/clickhouse/", "# Automatically restart kernel after installs so that your environment can access the new packages": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_memorystore_redis/", "qdrant.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/qdrant/", "tigris.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tigris/", "ecloud_vector_search.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/ecloud_vector_search/", "Create a bes instance and index docs.": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/baiducloud_vector_search/", "awadb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/awadb/", "with pip": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/supabase/", "%pip install --upgrade --quiet surrealdb langchain langchain-community": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/surrealdb/", "If using the default Docker installation, use this instantiation instead:": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/opensearch/", "pinecone.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pinecone/", "OR": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vearch/", "create cluster and add texts": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/bageldb/", "Option 1: use an OpenAI account": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azuresearch/", "usearch.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/usearch/", "This will only get documents for Ankush": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/milvus/", "Metadata {#metadata}": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/elasticsearch/", "Run tests with shell:": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pgvecto_rs/", "initialize marqo": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/marqo/", "Pip install necessary packages": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/timescalevector/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/faiss/", "epsilla.md": 
"https://python.langchain.com/v0.2/docs/integrations/vectorstores/epsilla/", "DocumentDB connection string": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/documentdb/", "Create collection if running for the first time. If the collection": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/semadb/", "analyticdb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/analyticdb/", "hologres.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/hologres/", "baiduvectordb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/baiduvectordb/", "Use Meilisearch vector store to store texts & associated embeddings as vector": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/meilisearch/", "conversational_retrieval_agents.md": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/conversational_retrieval_agents/", "Swapping the `ByteStore` {#swapping-the-bytestore}": "https://python.langchain.com/v0.2/docs/modules/data_connection/text_embedding/caching_embeddings/", "The vectorstore to use to index the child chunks": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/multi_vector/", "This text splitter is used to create the child documents": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/parent_document_retriever/", "vectorstore.md": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/vectorstore/", "Retrievers": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/.ipynb_checkpoints/index-checkpoint/", "Vector stores": "https://python.langchain.com/v0.2/docs/modules/data_connection/vectorstores/index/", "Document loaders": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/index/", "File Directory": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/file_directory/", "Load in document to retrieve over": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/agent_structured/"}, "TokenTextSplitter": {"azure_ai_search.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/azure_ai_search/", "load text splitter and split docs into snippets of text": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/starrocks/", "Download the content": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/handle_long_text/", "This is a long document we can split up.": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_transformers/split_by_token/"}, "AzureSearch": {"azure_ai_search.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/azure_ai_search/", "Option 1: use an OpenAI account": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azuresearch/"}, "RePhraseQueryRetriever": {"re_phrase.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/re_phrase/"}, "WebBaseLoader": {"re_phrase.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/re_phrase/", "Install necessary dependencies.": "https://python.langchain.com/v0.2/docs/integrations/callbacks/infino/", "Collection config is needed if we're creating a new Zep Collection": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/zep/", "merge_doc.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/merge_doc/", "Use this piece of code for testing new custom BeautifulSoup parsers": 
"https://python.langchain.com/v0.2/docs/integrations/document_loaders/web_base/", "Quickstart": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/.ipynb_checkpoints/quickstart-checkpoint/", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/v0.2/docs/use_cases/summarization/", "import dotenv": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/streaming/", "Make sure the model path is correct for your system!": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/local_retrieval_qa/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/chatbots/quickstart/", "Build a sample vectorDB": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/MultiQueryRetriever/", "Quickstart {#quickstart}": "https://python.langchain.com/v0.2/docs/modules/agents/quick_start/"}, "RecursiveCharacterTextSplitter": {"re_phrase.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/re_phrase/", "Helper function for printing docs": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/openvino_rerank/", "OR (depending on Python version)": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/cross_encoder_reranker/", "# activeloop token is needed if you are not signed in using CLI: `activeloop login -u -p `": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "Loading the COMVEST 2024 notice": "https://python.langchain.com/v0.2/docs/integrations/chat/maritalk/", "1. Vanilla RAG {#vanilla-rag-1}": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "ragatouille.md": "https://python.langchain.com/v0.2/docs/integrations/providers/ragatouille/", "Install all needed libraries": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/yellowbrick/", "TODO : Set values as per your requirements": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_vertex_ai_vector_search/", "vikingdb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vikingdb/", "astradb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/astradb/", "cassandra.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/cassandra/", "Collection config is needed if we're creating a new Zep Collection": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/zep/", "OR": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vearch/", "initialize MongoDB python client": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/mongodb_atlas/", "set a flag to switch between local and remote parsing": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/youtube_audio/", "Code for: class MyClass:": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/source_code/", "Quickstart": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/.ipynb_checkpoints/quickstart-checkpoint/", "QA with private data protection {#qa-with-private-data-protection}": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/qa_privacy_protection/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/chatbots/quickstart/", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/v0.2/docs/use_cases/code_understanding/", "import dotenv": 
"https://python.langchain.com/v0.2/docs/use_cases/question_answering/streaming/", "Make sure the model path is correct for your system!": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/local_retrieval_qa/", "Uncomment if you want to log to LangSmith": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/citations/", "%pip install -qU langchain langchain-community langchain-openai youtube-transcript-api pytube langchain-chroma": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/quickstart/", "%pip install -qU langchain langchain-community langchain-openai langchain-chroma": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/no_queries/", "The vectorstore to use to index the child chunks": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/multi_vector/", "Build a sample vectorDB": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/MultiQueryRetriever/", "This text splitter is used to create the child documents": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/parent_document_retriever/", "Full list of supported languages": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_transformers/code_splitter/", "This is a long document we can split up.": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_transformers/split_by_token/", "for local file use html_splitter.split_text_from_file()": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_transformers/HTML_header_metadata/", "MD splits": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_transformers/markdown_header_metadata/", "Split": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_transformers/HTML_section_aware_splitter/", "Quickstart {#quickstart}": "https://python.langchain.com/v0.2/docs/modules/agents/quick_start/", "Load in document to retrieve over": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/agent_structured/"}, "YouSearchAPIWrapper": {"For use in Chaining section": "https://python.langchain.com/v0.2/docs/integrations/tools/you/"}, "YouRetriever": {"For use in Chaining section": "https://python.langchain.com/v0.2/docs/integrations/retrievers/you-retriever/"}, "Jaguar": {"cosine: distance metric": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/jaguar/", "Jaguar": "https://python.langchain.com/v0.2/docs/integrations/providers/jaguar/"}, "CharacterTextSplitter": {"cosine: distance metric": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/jaguar/", "Establishing a connection to the database is facilitated through the singlestoredb Python connector.": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/singlestoredb/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/momento_vector_index/", "OpenAI": "https://python.langchain.com/v0.2/docs/integrations/platforms/openai/", "Here we want to make sure the answer is minimally relevant": "https://python.langchain.com/v0.2/docs/integrations/callbacks/confident/", "VDMS": "https://python.langchain.com/v0.2/docs/integrations/providers/vdms/", "document_comparison_toolkit.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/document_comparison_toolkit/", "lancedb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/lancedb/", "Pip install necessary package": 
"https://python.langchain.com/v0.2/docs/integrations/vectorstores/vdms/", "You need to install sqlite-vss as a dependency.": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sqlitevss/", "Refresh is required for server use": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vald/", "install package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/weaviate/", "add texts": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/dashvector/", "databricks_vector_search.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/databricks_vector_search/", "scann.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/scann/", "xata.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/xata/", "openai": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/hippo/", "docs[0].metadata[\"id\"] == \"id:testapp:testapp::32\"": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vespa/", "output length: 4": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/rockset/", "or install latest:": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/dingo/", "replace": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/zilliz/", "Set up the OpenAI Environment Variables": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azure_cosmos_db/", "default metric is angular": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/annoy/", "Wait until the cluster is ready for use.": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/couchbase/", "typesense.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/typesense/", "Here we useimport getpass": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tidb_vector/", "or shorter": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/activeloop_deeplake/", "Load the document and split it into chunks": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vlite/", "Pip install necessary package {#pip-install-necessary-package}": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/lantern/", "drop first if index already exists": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tair/", "import": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/chroma/", "duckdb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/duckdb/", "for example": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/alibabacloud_opensearch/", "Dependencies {#dependencies}": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/clarifai/", "# if you plan to use bson serialization, install also:": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sklearn/", "from langchain_community.embeddings.openai import OpenAIEmbeddings": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tencentvectordb/", "Get an OpenAI token: https://platform.openai.com/account/api-keys": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/docarray_in_memory/", "use directly a `where_str` to delete": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/myscale/", "tiledb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tiledb/", "clickhouse.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/clickhouse/", "# Automatically restart kernel after installs so that your 
environment can access the new packages": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_memorystore_redis/", "qdrant.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/qdrant/", "tigris.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tigris/", "ecloud_vector_search.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/ecloud_vector_search/", "Create a bes instance and index docs.": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/baiducloud_vector_search/", "awadb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/awadb/", "with pip": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/supabase/", "%pip install --upgrade --quiet surrealdb langchain langchain-community": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/surrealdb/", "If using the default Docker installation, use this instantiation instead:": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/opensearch/", "pinecone.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pinecone/", "OR": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/faiss_async/", "create cluster and add texts": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/bageldb/", "Option 1: use an OpenAI account": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azuresearch/", "usearch.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/usearch/", "This will only get documents for Ankush": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/milvus/", "Metadata {#metadata}": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/elasticsearch/", "Run tests with shell:": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pgvecto_rs/", "initialize marqo": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/marqo/", "Pip install necessary packages": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/timescalevector/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/faiss/", "epsilla.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/epsilla/", "DocumentDB connection string": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/documentdb/", "Create collection if running for the first time. 
If the collection": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/semadb/", "analyticdb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/analyticdb/", "hologres.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/hologres/", "baiduvectordb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/baiduvectordb/", "Use Meilisearch vector store to store texts & associated embeddings as vector": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/meilisearch/", "Uncomment this to install psychicapi if you don't already have it installed": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/psychic/", "Map reduce example": "https://python.langchain.com/v0.2/docs/integrations/llms/manifest/", "To make the caching really obvious, lets use a slower model.": "https://python.langchain.com/v0.2/docs/integrations/llms/llm_caching/", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/v0.2/docs/use_cases/summarization/", "conversational_retrieval_agents.md": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/conversational_retrieval_agents/", "Download the content": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/handle_long_text/", "indexing.md": "https://python.langchain.com/v0.2/docs/modules/data_connection/indexing/", "Swapping the `ByteStore` {#swapping-the-bytestore}": "https://python.langchain.com/v0.2/docs/modules/data_connection/text_embedding/caching_embeddings/", "Helper function for printing docs": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/contextual_compression/", "vectorstore.md": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/vectorstore/", "Retrievers": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/.ipynb_checkpoints/index-checkpoint/", "Vector stores": "https://python.langchain.com/v0.2/docs/modules/data_connection/vectorstores/index/", "This is a long document we can split up.": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_transformers/split_by_token/", "adding_memory_chain_multiple_inputs.md": "https://python.langchain.com/v0.2/docs/modules/memory/adding_memory_chain_multiple_inputs/"}, "MultiVectorRetriever": {"fleet_context.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/fleet_context/", "You need the dgml-utils package to use the DocugamiLoader (run pip install directly without \"poetry run\" if you are not using poetry)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/docugami/", "The vectorstore to use to index the child chunks": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/multi_vector/"}, "Document": {"fleet_context.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/fleet_context/", "STEP 1: Load": "https://python.langchain.com/v0.2/docs/integrations/retrievers/chatgpt-plugin/", "cohere.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/cohere/", "client.schema.delete_all()": "https://python.langchain.com/v0.2/docs/integrations/retrievers/weaviate-hybrid/", "bm25.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/bm25/", "Create a retriever with a demo encoder": "https://python.langchain.com/v0.2/docs/integrations/retrievers/qdrant-sparse/", "elasticsearch_retriever.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/elasticsearch_retriever/", 
"tf_idf.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/tf_idf/", "This example only specifies a relevant query": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/mongodb_atlas/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/vectara_self_query/", "create DashVector collection": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/dashvector/", "or install latest:": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/dingo/", "Get openAI api key by reading local .env file": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/timescalevector_self_query/", "create new index": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/pinecone/", "in case if some queries fail consider installing libdeeplake manually": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/activeloop_deeplake_self_query/", "import os": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/qdrant_self_query/", "apify.md": "https://python.langchain.com/v0.2/docs/integrations/tools/apify/", "Cohere": "https://python.langchain.com/v0.2/docs/integrations/providers/cohere/", "Pip install necessary package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pgembedding/", "Install all needed libraries": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/yellowbrick/", "See docker command above to launch a postgres instance with pgvector enabled.": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pgvector/", "default metric is angular": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/annoy/", "Pip install necessary package {#pip-install-necessary-package}": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/lantern/", "@markdown Please specify a source for demo purpose.": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_firestore/", "from langchain_community.embeddings.openai import OpenAIEmbeddings": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tencentvectordb/", "astradb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/astradb/", "OR": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/faiss_async/", "cassandra.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/cassandra/", "This will only get documents for Ankush": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/milvus/", "Metadata {#metadata}": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/elasticsearch/", "Run tests with shell:": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pgvecto_rs/", "Pip install necessary packages": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/timescalevector/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/faiss/", "nuclia_transformer.md": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/nuclia_transformer/", "ai21_semantic_text_splitter.md": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/ai21_semantic_text_splitter/", "Must be an OpenAI model that supports functions": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/openai_metadata_tagger/", "doctran_extract_properties.md": 
"https://python.langchain.com/v0.2/docs/integrations/document_transformers/doctran_extract_properties/", "google_translate.md": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/google_translate/", "doctran_interrogate_document.md": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/doctran_interrogate_document/", "doctran_translate_document.md": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/doctran_translate_document/", "Feature structure of `mlqa/en` dataset:": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/tensorflow_datasets/", "@markdown Please fill in the both the Google Cloud region and name of your Cloud SQL instance.": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_cloud_sql_mssql/", "airbyte_salesforce.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_salesforce/", "airbyte_cdk.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_cdk/", "airbyte_stripe.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_stripe/", "copypaste.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/copypaste/", "airbyte_typeform.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_typeform/", "apify_dataset.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/apify_dataset/", "# Automatically restart kernel after installs so that your environment can access the new packages": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_datastore/", "You need the dgml-utils package to use the DocugamiLoader (run pip install directly without \"poetry run\" if you are not using poetry)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/docugami/", "airbyte_hubspot.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_hubspot/", "airbyte_gong.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_gong/", "@markdown Please specify an endpoint associated with the instance and a key prefix for demo purpose.": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_memorystore_redis/", "@markdown Please specify an instance and a table for demo purpose.": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_bigtable/", "@title Set Your Values Here { display-mode: \"form\" }": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_el_carro/", "airbyte_shopify.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_shopify/", "airbyte_zendesk_support.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_zendesk_support/", "@markdown Please specify an instance id, a database, and a table for demo purpose.": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_spanner/", "The underlying python-gremlin has a problem when running in notebook": "https://python.langchain.com/v0.2/docs/integrations/graphs/azure_cosmosdb_gremlin/", "sagemaker.md": "https://python.langchain.com/v0.2/docs/integrations/llms/sagemaker/", "To make the caching really obvious, lets use a slower model.": "https://python.langchain.com/v0.2/docs/integrations/llms/llm_caching/", "Quickstart": "https://python.langchain.com/v0.2/docs/get_started/quickstart/", "QA with private data protection 
{#qa-with-private-data-protection}": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/qa_privacy_protection/", "self-query-qdrant": "https://python.langchain.com/v0.2/docs/templates/self-query-qdrant/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/web_scraping/", "Uncomment if you want to log to LangSmith": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/citations/", "%pip install -qU langchain langchain-community langchain-openai youtube-transcript-api pytube langchain-chroma": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/quickstart/", "Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/graph/constructing/", "Download the content": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/handle_long_text/", "indexing.md": "https://python.langchain.com/v0.2/docs/modules/data_connection/indexing/", "The vectorstore to use to index the child chunks": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/multi_vector/", "Custom Retriever {#custom-retriever}": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/custom_retriever/", "Define your embedding model": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/time_weighted_vectorstore/", "This example only specifies a filter": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/self_query/", "Retrievers": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/.ipynb_checkpoints/index-checkpoint/", "Self-querying": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/.ipynb_checkpoints/self_query-checkpoint/", "Custom Document Loader {#custom-document-loader}": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/custom/", "PDF": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/pdf/"}, "BaseStore": {"fleet_context.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/fleet_context/"}, "VectorStore": {"fleet_context.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/fleet_context/"}, "InMemoryStore": {"fleet_context.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/fleet_context/", "You need the dgml-utils package to use the DocugamiLoader (run pip install directly without \"poetry run\" if you are not using poetry)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/docugami/", "This text splitter is used to create the child documents": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/parent_document_retriever/"}, "ContextualCompressionRetriever": {"Helper function for printing docs": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/contextual_compression/", "OR (depending on Python version)": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/cross_encoder_reranker/", "Get 3 diff embeddings.": "https://python.langchain.com/v0.2/docs/integrations/retrievers/merger_retriever/", "1. 
Vanilla RAG {#vanilla-rag-1}": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "ragatouille.md": "https://python.langchain.com/v0.2/docs/integrations/providers/ragatouille/"}, "LLMLinguaCompressor": {"Helper function for printing docs": "https://python.langchain.com/v0.2/docs/integrations/retrievers/llmlingua/"}, "RetrievalQA": {"Helper function for printing docs": "https://python.langchain.com/v0.2/docs/integrations/retrievers/llmlingua/", "bedrock.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/bedrock/", "OR (depending on Python version)": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/voyageai-reranker/", "# activeloop token is needed if you are not signed in using CLI: `activeloop login -u -p `": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "Here we want to make sure the answer is minimally relevant": "https://python.langchain.com/v0.2/docs/integrations/callbacks/confident/", "1. Vanilla RAG {#vanilla-rag-1}": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "document_comparison_toolkit.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/document_comparison_toolkit/", "load text splitter and split docs into snippets of text": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/starrocks/", "scann.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/scann/", "TODO : Set values as per your requirements": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_vertex_ai_vector_search/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/momento_vector_index/", "or shorter": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/activeloop_deeplake/", "Clean up KDB.AI \"documents\" table and index for similarity search": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/kdbai/", "Pip install necessary packages": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/timescalevector/", "DocumentDB connection string": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/documentdb/", "initialize MongoDB python client": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/mongodb_atlas/", "set a flag to switch between local and remote parsing": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/youtube_audio/", "You need the dgml-utils package to use the DocugamiLoader (run pip install directly without \"poetry run\" if you are not using poetry)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/docugami/", "Retrievers": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/.ipynb_checkpoints/index-checkpoint/"}, "ElasticSearchBM25Retriever": {"Alternatively, you can load an existing index": "https://python.langchain.com/v0.2/docs/integrations/retrievers/elastic_search_bm25/"}, "OutlineRetriever": {"outline.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/outline/", "Outline": "https://python.langchain.com/v0.2/docs/integrations/providers/outline/"}, "ConversationalRetrievalChain": {"outline.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/outline/", "get a token: https://platform.openai.com/account/api-keys": "https://python.langchain.com/v0.2/docs/integrations/retrievers/arxiv/", "Setup API keys for Kay and OpenAI": "https://python.langchain.com/v0.2/docs/integrations/retrievers/sec_filings/", "Setup 
API key": "https://python.langchain.com/v0.2/docs/integrations/retrievers/kay/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_chat/", "Pip install necessary package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sap_hanavector/"}, "ZepMemory": {"Set this to your Zep server URL": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory/", "Zep": "https://python.langchain.com/v0.2/docs/integrations/providers/zep/"}, "SearchScope": {"Set this to your Zep server URL": "https://python.langchain.com/v0.2/docs/integrations/retrievers/zep_memorystore/"}, "SearchType": {"Set this to your Zep server URL": "https://python.langchain.com/v0.2/docs/integrations/retrievers/zep_memorystore/", "You need the dgml-utils package to use the DocugamiLoader (run pip install directly without \"poetry run\" if you are not using poetry)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/docugami/", "The vectorstore to use to index the child chunks": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/multi_vector/"}, "ZepRetriever": {"Set this to your Zep server URL": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory/", "Zep": "https://python.langchain.com/v0.2/docs/integrations/providers/zep/"}, "VespaRetriever": {"vespa.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/vespa/", "Vespa": "https://python.langchain.com/v0.2/docs/integrations/providers/vespa/"}, "AmazonKendraRetriever": {"amazon_kendra_retriever.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/amazon_kendra_retriever/"}, "AmazonKnowledgeBasesRetriever": {"bedrock.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/bedrock/"}, "Bedrock": {"bedrock.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/bedrock/", "Guardrails for Amazon Bedrock with trace": "https://python.langchain.com/v0.2/docs/integrations/llms/bedrock/"}, "CohereRerank": {"OR (depending on Python version)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/cohere-reranker/"}, "Cohere": {"OR (depending on Python version)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/cohere-reranker/", "Cohere": "https://python.langchain.com/v0.2/docs/integrations/providers/cohere/", "self-query-qdrant": "https://python.langchain.com/v0.2/docs/templates/self-query-qdrant/"}, "NeuralDBRetriever": {"From scratch": "https://python.langchain.com/v0.2/docs/integrations/retrievers/thirdai_neuraldb/"}, "SingleStoreDB": {"Establishing a connection to the database is facilitated through the singlestoredb Python connector.": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/singlestoredb/", "SingleStoreDB": "https://python.langchain.com/v0.2/docs/integrations/providers/singlestoredb/"}, "WikipediaRetriever": {"get a token: https://platform.openai.com/account/api-keys": "https://python.langchain.com/v0.2/docs/integrations/retrievers/wikipedia/", "Wikipedia": "https://python.langchain.com/v0.2/docs/integrations/providers/wikipedia/", "Uncomment if you want to log to LangSmith": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/citations/"}, "MetalRetriever": {"metal.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/metal/", "Metal": "https://python.langchain.com/v0.2/docs/integrations/providers/metal/"}, "BreebsRetriever": {"breebs.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/breebs/", 
"Breebs (Open Knowledge)": "https://python.langchain.com/v0.2/docs/integrations/providers/breebs/"}, "CSVLoader": {"STEP 1: Load": "https://python.langchain.com/v0.2/docs/integrations/retrievers/chatgpt-plugin/", "csv.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/csv/", "pebblo.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/pebblo/", "CSV": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/csv/"}, "ChatGPTPluginRetriever": {"STEP 1: Load": "https://python.langchain.com/v0.2/docs/integrations/retrievers/chatgpt-plugin/", "OpenAI": "https://python.langchain.com/v0.2/docs/integrations/platforms/openai/"}, "KayAiRetriever": {"Setup API keys for Kay and OpenAI": "https://python.langchain.com/v0.2/docs/integrations/retrievers/sec_filings/", "Setup API key": "https://python.langchain.com/v0.2/docs/integrations/retrievers/kay/"}, "ChatCohere": {"cohere.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/cohere/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/integrations/chat/cohere/", "Cohere": "https://python.langchain.com/v0.2/docs/integrations/providers/cohere/", "Quickstart": "https://python.langchain.com/v0.2/docs/modules/model_io/quick_start/", "Model I/O": "https://python.langchain.com/v0.2/docs/modules/model_io/index/"}, "CohereRagRetriever": {"cohere.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/cohere/", "Cohere": "https://python.langchain.com/v0.2/docs/integrations/providers/cohere/"}, "DriaRetriever": {"Installation {#installation}": "https://python.langchain.com/v0.2/docs/integrations/retrievers/dria_index/"}, "DocArrayRetriever": {"initialize the index": "https://python.langchain.com/v0.2/docs/integrations/retrievers/docarray_retriever/"}, "SVMRetriever": {"svm.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/svm/"}, "TavilySearchAPIRetriever": {"os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/integrations/retrievers/tavily/"}, "PineconeHybridSearchRetriever": {"create the index": "https://python.langchain.com/v0.2/docs/integrations/retrievers/pinecone_hybrid_search/", "Pinecone": "https://python.langchain.com/v0.2/docs/integrations/providers/pinecone/"}, "DeepLake": {"# activeloop token is needed if you are not signed in using CLI: `activeloop login -u -p `": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "in case if some queries fail consider installing libdeeplake manually": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/activeloop_deeplake_self_query/", "Activeloop Deep Lake": "https://python.langchain.com/v0.2/docs/integrations/providers/activeloop_deeplake/", "or shorter": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/activeloop_deeplake/"}, "AsyncHtmlLoader": {"# activeloop token is needed if you are not signed in using CLI: `activeloop login -u -p `": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "html2text.md": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/html2text/", "async_html.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/async_html/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/web_scraping/"}, "Html2TextTransformer": {"# activeloop token is needed if you are not signed in using CLI: `activeloop login -u -p `": 
"https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "html2text.md": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/html2text/", "async_chromium.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/async_chromium/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/web_scraping/"}, "create_structured_output_chain": {"# activeloop token is needed if you are not signed in using CLI: `activeloop login -u -p `": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/graph/mapping/"}, "HumanMessagePromptTemplate": {"# activeloop token is needed if you are not signed in using CLI: `activeloop login -u -p `": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "get a chat completion from the formatted messages": "https://python.langchain.com/v0.2/docs/integrations/chat/vllm/", "!pip3 install text-generation": "https://python.langchain.com/v0.2/docs/integrations/chat/llama2_chat/", "context.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/context/", "sql_database.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/sql_database/", "Install all needed libraries": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/yellowbrick/", "see https://python.langchain.com/en/latest/modules/data_connection/getting_started.html for more details": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/figma/", "[Beta] Memory": "https://python.langchain.com/v0.2/docs/modules/memory/.ipynb_checkpoints/index-checkpoint/", "adding_memory.md": "https://python.langchain.com/v0.2/docs/modules/memory/adding_memory/", "Prompts": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/.ipynb_checkpoints/prompts-checkpoint/", "Quick reference {#quick-reference}": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/quick_start/", "Prompt templates": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/prompt_templates/.ipynb_checkpoints/index-checkpoint/"}, "PubMedRetriever": {"pubmed.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/pubmed/", "PubMed": "https://python.langchain.com/v0.2/docs/integrations/providers/pubmed/"}, "WeaviateHybridSearchRetriever": {"client.schema.delete_all()": "https://python.langchain.com/v0.2/docs/integrations/retrievers/weaviate-hybrid/"}, "EmbedchainRetriever": {"Installation {#installation}": "https://python.langchain.com/v0.2/docs/integrations/retrievers/embedchain/"}, "create_retrieval_chain": {"ragatouille.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/ragatouille/", "Quickstart": "https://python.langchain.com/v0.2/docs/get_started/quickstart/", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/v0.2/docs/use_cases/code_understanding/", "import dotenv": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/chat_history/"}, "create_stuff_documents_chain": {"ragatouille.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/ragatouille/", "Quickstart": "https://python.langchain.com/v0.2/docs/get_started/quickstart/", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/v0.2/docs/use_cases/code_understanding/", "import dotenv": 
"https://python.langchain.com/v0.2/docs/use_cases/question_answering/chat_history/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/chatbots/quickstart/"}, "ArxivRetriever": {"get a token: https://platform.openai.com/account/api-keys": "https://python.langchain.com/v0.2/docs/integrations/retrievers/arxiv/", "Arxiv": "https://python.langchain.com/v0.2/docs/integrations/providers/arxiv/"}, "BM25Retriever": {"bm25.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/bm25/", "Loading the COMVEST 2024 notice": "https://python.langchain.com/v0.2/docs/integrations/chat/maritalk/", "initialize the bm25 retriever and faiss retriever": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/ensemble/"}, "QdrantSparseVectorRetriever": {"Create a retriever with a demo encoder": "https://python.langchain.com/v0.2/docs/integrations/retrievers/qdrant-sparse/"}, "DeterministicFakeEmbedding": {"elasticsearch_retriever.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/elasticsearch_retriever/"}, "Embeddings": {"elasticsearch_retriever.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/elasticsearch_retriever/", "Ensure that all we need is installed": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/infinispanvs/"}, "ElasticsearchRetriever": {"elasticsearch_retriever.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/elasticsearch_retriever/"}, "ArceeRetriever": {"Define filters": "https://python.langchain.com/v0.2/docs/integrations/retrievers/arcee/", "Arcee": "https://python.langchain.com/v0.2/docs/integrations/providers/arcee/"}, "FlashrankRerank": {"OR (depending on Python version)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/flashrank-reranker/", "1. 
Vanilla RAG {#vanilla-rag-1}": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/"}, "ChaindeskRetriever": {"chaindesk.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/chaindesk/", "Chaindesk": "https://python.langchain.com/v0.2/docs/integrations/providers/chaindesk/"}, "MergerRetriever": {"Get 3 diff embeddings.": "https://python.langchain.com/v0.2/docs/integrations/retrievers/merger_retriever/"}, "EmbeddingsClusteringFilter": {"Get 3 diff embeddings.": "https://python.langchain.com/v0.2/docs/integrations/retrievers/merger_retriever/"}, "EmbeddingsRedundantFilter": {"Get 3 diff embeddings.": "https://python.langchain.com/v0.2/docs/integrations/retrievers/merger_retriever/", "Helper function for printing docs": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/contextual_compression/"}, "LongContextReorder": {"Get 3 diff embeddings.": "https://python.langchain.com/v0.2/docs/integrations/retrievers/merger_retriever/", "Get embeddings.": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/long_context_reorder/"}, "TFIDFRetriever": {"tf_idf.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/tf_idf/"}, "GoogleVertexAIMultiTurnSearchRetriever": {"google_vertex_ai_search.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/google_vertex_ai_search/"}, "GoogleVertexAISearchRetriever": {"google_vertex_ai_search.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/google_vertex_ai_search/", "Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/"}, "Milvus": {"This example only specifies a relevant query": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/milvus_self_query/", "Milvus": "https://python.langchain.com/v0.2/docs/integrations/providers/milvus/", "Zilliz": "https://python.langchain.com/v0.2/docs/integrations/providers/zilliz/", "replace": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/zilliz/", "This will only get documents for Ankush": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/milvus/"}, "AttributeInfo": {"This example only specifies a relevant query": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/mongodb_atlas/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/vectara_self_query/", "create DashVector collection": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/dashvector/", "or install latest:": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/dingo/", "Get openAI api key by reading local .env file": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/timescalevector_self_query/", "create new index": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/pinecone/", "in case if some queries fail consider installing libdeeplake manually": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/activeloop_deeplake_self_query/", "import os": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/qdrant_self_query/", "Pip install necessary packages": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/timescalevector/", "You need the dgml-utils package to use the DocugamiLoader (run pip install directly without \"poetry run\" if you are not using poetry)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/docugami/", 
"self-query-qdrant": "https://python.langchain.com/v0.2/docs/templates/self-query-qdrant/", "This example only specifies a filter": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/self_query/", "Self-querying": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/.ipynb_checkpoints/self_query-checkpoint/"}, "SelfQueryRetriever": {"This example only specifies a relevant query": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/mongodb_atlas/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/vectara_self_query/", "create DashVector collection": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/dashvector/", "or install latest:": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/dingo/", "Get openAI api key by reading local .env file": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/timescalevector_self_query/", "create new index": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/pinecone/", "in case if some queries fail consider installing libdeeplake manually": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/activeloop_deeplake_self_query/", "import os": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/qdrant_self_query/", "Astra DB": "https://python.langchain.com/v0.2/docs/integrations/providers/astradb/", "Chroma": "https://python.langchain.com/v0.2/docs/integrations/providers/.ipynb_checkpoints/chroma-checkpoint/", "Pip install necessary packages": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/timescalevector/", "You need the dgml-utils package to use the DocugamiLoader (run pip install directly without \"poetry run\" if you are not using poetry)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/docugami/", "This example only specifies a filter": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/self_query/", "Self-querying": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/.ipynb_checkpoints/self_query-checkpoint/"}, "OpenAI": {"This example only specifies a relevant query": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/mongodb_atlas/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_chat/", "or install latest:": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/dingo/", "Get openAI api key by reading local .env file": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/timescalevector_self_query/", "create new index": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/pinecone/", "in case if some queries fail consider installing libdeeplake manually": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/activeloop_deeplake_self_query/", "import os": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/qdrant_self_query/", "OpenAI": "https://python.langchain.com/v0.2/docs/integrations/platforms/openai/", "use it with langchain {#use-it-with-langchain}": "https://python.langchain.com/v0.2/docs/integrations/tools/google_jobs/", "google_serper.md": "https://python.langchain.com/v0.2/docs/integrations/tools/google_serper/", "Answer with 'Zhu'": "https://python.langchain.com/v0.2/docs/integrations/tools/human_tools/", "google_finance.md": 
"https://python.langchain.com/v0.2/docs/integrations/tools/google_finance/", "awslambda.md": "https://python.langchain.com/v0.2/docs/integrations/tools/awslambda/", "folder_id='1yucgL9WGgWZdM1TOuKkeghlPizuzMYb5'": "https://python.langchain.com/v0.2/docs/integrations/tools/google_drive/", "openweathermap.md": "https://python.langchain.com/v0.2/docs/integrations/tools/openweathermap/", "search_tools.md": "https://python.langchain.com/v0.2/docs/integrations/tools/search_tools/", "eleven_labs_tts.md": "https://python.langchain.com/v0.2/docs/integrations/tools/eleven_labs_tts/", "get from https://platform.openai.com/": "https://python.langchain.com/v0.2/docs/integrations/tools/zapier/", "send data into the chain": "https://python.langchain.com/v0.2/docs/integrations/tools/nvidia_riva/", "lemonai.md": "https://python.langchain.com/v0.2/docs/integrations/tools/lemonai/", "graphql.md": "https://python.langchain.com/v0.2/docs/integrations/tools/graphql/", "searchapi.md": "https://python.langchain.com/v0.2/docs/integrations/tools/searchapi/", "gradio_tools.md": "https://python.langchain.com/v0.2/docs/integrations/tools/gradio_tools/", "sceneXplain.md": "https://python.langchain.com/v0.2/docs/integrations/tools/sceneXplain/", "Needed if you would like to display images in the notebook": "https://python.langchain.com/v0.2/docs/integrations/tools/dalle_image_generator/", "Based on ReAct Agent": "https://python.langchain.com/v0.2/docs/integrations/tools/ionic_shopping/", "Set this to your Zep server URL": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory/", "loads previous state from Mot\u00f6rhead \ud83e\udd18": "https://python.langchain.com/v0.2/docs/integrations/memory/motorhead_memory/", "Here we want to make sure the answer is minimally relevant": "https://python.langchain.com/v0.2/docs/integrations/callbacks/confident/", "LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/callbacks/llmonitor/", "Fiddler project and model names, used for model registration": "https://python.langchain.com/v0.2/docs/integrations/callbacks/fiddler/", "LLM Hyperparameters": "https://python.langchain.com/v0.2/docs/integrations/callbacks/sagemaker_tracking/", "labelstudio.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/labelstudio/", "Connect to Comet if no API Key is set": "https://python.langchain.com/v0.2/docs/integrations/callbacks/comet_tracing/", "argilla.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/argilla/", "promptlayer.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/promptlayer/", "Streamlit": "https://python.langchain.com/v0.2/docs/integrations/callbacks/.ipynb_checkpoints/streamlit-checkpoint/", "trubrics.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/trubrics/", "Install necessary dependencies.": "https://python.langchain.com/v0.2/docs/integrations/callbacks/infino/", "From LangChain, import standard modules for prompting.": "https://python.langchain.com/v0.2/docs/integrations/providers/dspy/", "SearchApi": "https://python.langchain.com/v0.2/docs/integrations/providers/searchapi/", "os.environ[\"OPENAI_ORGANIZATION\"] = \"...\"": "https://python.langchain.com/v0.2/docs/integrations/providers/comet_tracking/", "scenario 1 - LLM": "https://python.langchain.com/v0.2/docs/integrations/providers/aim_tracking/", "os.environ[\"OPENAI_API_KEY\"] = \"\"": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracking/", "Log10": 
"https://python.langchain.com/v0.2/docs/integrations/providers/log10/", "LangChain Decorators \u2728": "https://python.langchain.com/v0.2/docs/integrations/providers/langchain_decorators/", "!pip3 install rebuff openai -U": "https://python.langchain.com/v0.2/docs/integrations/providers/rebuff/", "SCENARIO 1 - LLM": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_tracking/", "Serper - Google Search API": "https://python.langchain.com/v0.2/docs/integrations/providers/google_serper/", "Helicone": "https://python.langchain.com/v0.2/docs/integrations/providers/helicone/", "Shale Protocol": "https://python.langchain.com/v0.2/docs/integrations/providers/shaleprotocol/", "you don't need to call close to write profiles to WhyLabs, upload will occur periodically, but to demo let's not wait.": "https://python.langchain.com/v0.2/docs/integrations/providers/whylabs_profiling/", "wandb documentation to configure wandb using env variables": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracing/", "Setup and use the ClearML Callback": "https://python.langchain.com/v0.2/docs/integrations/providers/clearml_tracking/", "0: Import ray serve and request from starlette": "https://python.langchain.com/v0.2/docs/integrations/providers/ray_serve/", "Create a dataframe": "https://python.langchain.com/v0.2/docs/integrations/toolkits/csv/", "xorbits.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/xorbits/", "jira.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/jira/", "in apache-spark root directory. (tested here with \"spark-3.4.0-bin-hadoop3 and later\")": "https://python.langchain.com/v0.2/docs/integrations/toolkits/spark/", "For Windows/Linux": "https://python.langchain.com/v0.2/docs/integrations/toolkits/azure_cognitive_services/", "Select the LLM to use. Here, we use gpt-3.5-turbo-instruct": "https://python.langchain.com/v0.2/docs/integrations/toolkits/openapi_nla/", "steam.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/steam/", "json.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/json/", "Copilot Sandbox": "https://python.langchain.com/v0.2/docs/integrations/toolkits/clickup/", "Set environmental variables here": "https://python.langchain.com/v0.2/docs/integrations/toolkits/office365/", "pandas.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/pandas/", "nasa.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/nasa/", "azure_ai_services.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/azure_ai_services/", "NOTE: In this example. 
We must set `allow_dangerous_request=True` to enable the OpenAPI Agent to automatically use the Request Tool.": "https://python.langchain.com/v0.2/docs/integrations/toolkits/openapi/", "Set your environment variables using os.environ": "https://python.langchain.com/v0.2/docs/integrations/toolkits/gitlab/", "load text splitter and split docs into snippets of text": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/starrocks/", "install package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/weaviate/", "cosine: distance metric": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/jaguar/", "initialize marqo": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/marqo/", "Pip install necessary packages": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/timescalevector/", "DocumentDB connection string": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/documentdb/", "initialize MongoDB python client": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/mongodb_atlas/", "OR (depending on Python version)": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/voyageai-reranker/", "Uncomment this to install psychicapi if you don't already have it installed": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/psychic/", "You need the dgml-utils package to use the DocugamiLoader (run pip install directly without \"poetry run\" if you are not using poetry)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/docugami/", "You can store your OPENAI_API_KEY in a .env file as well": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/amazon_textract/", "networkx.md": "https://python.langchain.com/v0.2/docs/integrations/graphs/networkx/", "get a token: https://platform.openai.com/account/api-keys": "https://python.langchain.com/v0.2/docs/integrations/llms/openai/", "install the opaqueprompts and langchain packages": "https://python.langchain.com/v0.2/docs/integrations/llms/opaqueprompts/", "To make the caching really obvious, lets use a slower model.": "https://python.langchain.com/v0.2/docs/modules/model_io/llms/llm_caching/", "Layerup Security": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/layerup_security/", "Quickstart": "https://python.langchain.com/v0.2/docs/modules/model_io/.ipynb_checkpoints/quick_start-checkpoint/", "Note that we set max_retries = 0 to avoid retrying on RateLimits, etc": "https://python.langchain.com/v0.2/docs/guides/productionization/fallbacks/", "Using https://huggingface.co/laiyer/deberta-v3-base-prompt-injection": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/hugging_face_prompt_injection/", "Logical Fallacy chain": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/logical_fallacy_chain/", "Constitutional chain": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/constitutional_chain/", "moderation.md": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/moderation/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/data_generation/", "Get embeddings.": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/long_context_reorder/", "Helper function for printing docs": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/contextual_compression/", "Retrievers": 
"https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/.ipynb_checkpoints/index-checkpoint/", "Self-querying": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/.ipynb_checkpoints/self_query-checkpoint/", "agent_with_memory_in_db.md": "https://python.langchain.com/v0.2/docs/modules/memory/agent_with_memory_in_db/", "[Beta] Memory": "https://python.langchain.com/v0.2/docs/modules/memory/.ipynb_checkpoints/index-checkpoint/", "!python -m spacy download en_core_web_lg": "https://python.langchain.com/v0.2/docs/modules/memory/custom_memory/", "adding_memory_chain_multiple_inputs.md": "https://python.langchain.com/v0.2/docs/modules/memory/adding_memory_chain_multiple_inputs/", "adding_memory.md": "https://python.langchain.com/v0.2/docs/modules/memory/adding_memory/", "Combined": "https://python.langchain.com/v0.2/docs/modules/memory/multiple_memory/", "Here it is by default set to \"AI\"": "https://python.langchain.com/v0.2/docs/modules/memory/conversational_customization/", "agent_with_memory.md": "https://python.langchain.com/v0.2/docs/modules/memory/agent_with_memory/", "kg.md": "https://python.langchain.com/v0.2/docs/modules/memory/types/kg/", "We can see here that the buffer is updated": "https://python.langchain.com/v0.2/docs/modules/memory/types/token_buffer/", "Entity": "https://python.langchain.com/v0.2/docs/modules/memory/types/entity_summary_memory/", "Conversation Summary": "https://python.langchain.com/v0.2/docs/modules/memory/types/summary/", "Backed by a Vector Store": "https://python.langchain.com/v0.2/docs/modules/memory/types/vectorstore_retriever_memory/", "Conversation Buffer Window": "https://python.langchain.com/v0.2/docs/modules/memory/types/buffer_window/", "Conversation Buffer": "https://python.langchain.com/v0.2/docs/modules/memory/types/buffer/", "We can see here that there is a summary of the conversation and then some previous interactions": "https://python.langchain.com/v0.2/docs/modules/memory/types/summary_buffer/", "Callbacks": "https://python.langchain.com/v0.2/docs/modules/callbacks/index/", "First, define custom callback handler implementations": "https://python.langchain.com/v0.2/docs/modules/callbacks/multiple_callbacks/", "You can kick off concurrent runs from within the context manager": "https://python.langchain.com/v0.2/docs/modules/callbacks/token_counting/", "this chain will both print to stdout (because verbose=True) and write to 'output.log'": "https://python.langchain.com/v0.2/docs/modules/callbacks/filecallbackhandler/", "Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/handle_parsing_errors/", "Model I/O": "https://python.langchain.com/v0.2/docs/modules/model_io/index/", "Define your desired data structure.": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/quick_start/", "retry.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/retry/", "datetime.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/datetime/", "token_usage_tracking.md": "https://python.langchain.com/v0.2/docs/modules/model_io/llms/token_usage_tracking/", "streaming_llm.md": "https://python.langchain.com/v0.2/docs/modules/model_io/llms/streaming_llm/", "Quick Start {#quick-start}": "https://python.langchain.com/v0.2/docs/modules/model_io/llms/quick_start/", "batch_configurable_chain([\"ice cream\", \"spaghetti\", \"dumplings\"])": 
"https://python.langchain.com/v0.2/docs/expression_language/why/", "> ChatPromptValue(messages=[HumanMessage(content='tell me a short joke about ice cream')])": "https://python.langchain.com/v0.2/docs/expression_language/get_started/"}, "PGVector": {"This example only specifies a relevant query": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/pgvector_self_query/", "PGVector": "https://python.langchain.com/v0.2/docs/integrations/providers/pgvector/", "See docker command above to launch a postgres instance with pgvector enabled.": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pgvector/"}, "Weaviate": {"This example only specifies a relevant query": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/weaviate_self_query/"}, "Vectara": {"Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vectara/", "Vectara": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/index/"}, "DashVector": {"create DashVector collection": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/dashvector/", "DashVector": "https://python.langchain.com/v0.2/docs/integrations/providers/dashvector/", "add texts": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/dashvector/"}, "Tongyi": {"create DashVector collection": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/dashvector/", "Alibaba Cloud": "https://python.langchain.com/v0.2/docs/integrations/providers/alibaba_cloud/", "Install the package": "https://python.langchain.com/v0.2/docs/integrations/llms/tongyi/"}, "DatabricksVectorSearch": {"This example only specifies a relevant query": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/databricks_vector_search/", "databricks_vector_search.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/databricks_vector_search/"}, "Dingo": {"or install latest:": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/dingo/", "DingoDB": "https://python.langchain.com/v0.2/docs/integrations/providers/dingo/"}, "OpenSearchVectorSearch": {"This example only specifies a relevant query": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/opensearch_self_query/", "AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "OpenSearch": "https://python.langchain.com/v0.2/docs/integrations/providers/opensearch/", "If using the default Docker installation, use this instantiation instead:": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/opensearch/"}, "ElasticsearchStore": {"This example only specifies a relevant query": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/elasticsearch_self_query/", "Elasticsearch": "https://python.langchain.com/v0.2/docs/integrations/providers/elasticsearch/", "Metadata {#metadata}": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/elasticsearch/", "indexing.md": "https://python.langchain.com/v0.2/docs/modules/data_connection/indexing/"}, "ConnectionParams": {"This example only specifies a relevant query": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/tencentvectordb/", "from langchain_community.embeddings.openai import OpenAIEmbeddings": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tencentvectordb/", "baiduvectordb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/baiduvectordb/"}, "MetaField": {"This 
example only specifies a relevant query": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/tencentvectordb/", "from langchain_community.embeddings.openai import OpenAIEmbeddings": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tencentvectordb/"}, "TencentVectorDB": {"This example only specifies a relevant query": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/tencentvectordb/", "Tencent": "https://python.langchain.com/v0.2/docs/integrations/providers/tencent/", "from langchain_community.embeddings.openai import OpenAIEmbeddings": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tencentvectordb/"}, "TimescaleVector": {"Get openAI api key by reading local .env file": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/timescalevector_self_query/", "Pip install necessary packages": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/timescalevector/"}, "AstraDB": {"This example only specifies a relevant query": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/astradb/"}, "SupabaseVectorStore": {"This example only specifies a relevant query": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/supabase_self_query/", "Supabase (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/providers/supabase/", "with pip": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/supabase/"}, "Redis": {"This example only specifies a relevant query": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/redis_self_query/", "Redis": "https://python.langchain.com/v0.2/docs/integrations/providers/redis/", "connection to redis standalone at localhost, db 0, no password": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/redis/"}, "MyScale": {"This example only specifies a relevant query": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/myscale_self_query/", "MyScale": "https://python.langchain.com/v0.2/docs/integrations/providers/myscale/", "use directly a `where_str` to delete": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/myscale/"}, "MongoDBAtlasVectorSearch": {"This example only specifies a relevant query": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/mongodb_atlas/", "initialize MongoDB python client": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/mongodb_atlas/"}, "Qdrant": {"import os": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/qdrant_self_query/", "Qdrant": "https://python.langchain.com/v0.2/docs/integrations/providers/qdrant/", "qdrant.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/qdrant/", "Vector stores": "https://python.langchain.com/v0.2/docs/modules/data_connection/vectorstores/index/"}, "AzureMLOnlineEndpoint": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "azure_ml.md": "https://python.langchain.com/v0.2/docs/integrations/llms/azure_ml/"}, "AzureOpenAI": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/.ipynb_checkpoints/microsoft-checkpoint/", "OpenAI": "https://python.langchain.com/v0.2/docs/integrations/platforms/openai/", "The API version you want to use: set this to `2023-12-01-preview` for the released version.": "https://python.langchain.com/v0.2/docs/integrations/llms/azure_openai/"}, "AzureChatOpenAI": {"Microsoft": 
"https://python.langchain.com/v0.2/docs/integrations/platforms/.ipynb_checkpoints/microsoft-checkpoint/", "OpenAI": "https://python.langchain.com/v0.2/docs/integrations/platforms/openai/", "azure_chat_openai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/azure_chat_openai/", "The underlying python-gremlin has a problem when running in notebook": "https://python.langchain.com/v0.2/docs/integrations/graphs/azure_cosmosdb_gremlin/"}, "AzureAIDataLoader": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "Create a connection to your project": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/azure_ai_data/"}, "AzureAIDocumentIntelligenceLoader": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "microsoft_word.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/microsoft_word/", "microsoft_excel.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/microsoft_excel/", "microsoft_powerpoint.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/microsoft_powerpoint/", "azure_document_intelligence.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/azure_document_intelligence/", "Microsoft Office": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/office_file/", "PDF": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/pdf/", "HTML": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/html/"}, "AzureBlobStorageContainerLoader": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/.ipynb_checkpoints/microsoft-checkpoint/", "azure_blob_storage_container.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/azure_blob_storage_container/"}, "AzureBlobStorageFileLoader": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/.ipynb_checkpoints/microsoft-checkpoint/", "azure_blob_storage_file.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/azure_blob_storage_file/"}, "OneDriveLoader": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/.ipynb_checkpoints/microsoft-checkpoint/", "microsoft_onedrive.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/microsoft_onedrive/"}, "UnstructuredWordDocumentLoader": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/.ipynb_checkpoints/microsoft-checkpoint/", "Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "microsoft_word.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/microsoft_word/"}, "UnstructuredExcelLoader": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "microsoft_excel.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/microsoft_excel/"}, "SharePointLoader": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "loads documents from root directory": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/microsoft_sharepoint/"}, "UnstructuredPowerPointLoader": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "Unstructured": 
"https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "microsoft_powerpoint.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/microsoft_powerpoint/"}, "OneNoteLoader": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "microsoft_onenote.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/microsoft_onenote/"}, "AzureCosmosDBVectorSearch": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/.ipynb_checkpoints/microsoft-checkpoint/", "Set up the OpenAI Environment Variables": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azure_cosmos_db/"}, "O365Toolkit": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/.ipynb_checkpoints/microsoft-checkpoint/", "Set environmental variables here": "https://python.langchain.com/v0.2/docs/integrations/toolkits/office365/"}, "PowerBIToolkit": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/.ipynb_checkpoints/microsoft-checkpoint/", "fictional example": "https://python.langchain.com/v0.2/docs/integrations/toolkits/powerbi/"}, "PowerBIDataset": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/.ipynb_checkpoints/microsoft-checkpoint/", "fictional example": "https://python.langchain.com/v0.2/docs/integrations/toolkits/powerbi/"}, "BingSearchAPIWrapper": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/.ipynb_checkpoints/microsoft-checkpoint/", "bing_search.md": "https://python.langchain.com/v0.2/docs/integrations/tools/bing_search/"}, "PresidioAnonymizer": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "Download model": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/index/"}, "PresidioReversibleAnonymizer": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "QA with private data protection {#qa-with-private-data-protection}": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/qa_privacy_protection/", "Multi-language data anonymization with Microsoft Presidio {#multi-language-data-anonymization-with-microsoft-presidio}": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/multi_language/", "Reversible data anonymization with Microsoft Presidio {#reversible-data-anonymization-with-microsoft-presidio}": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/reversible/", "Download model": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/index/"}, "AmazonAPIGateway": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "These are sample parameters for Falcon 40B Instruct Deployed from Amazon SageMaker JumpStart": "https://python.langchain.com/v0.2/docs/integrations/llms/amazon_api_gateway/"}, "ContentHandlerBase": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/"}, "S3DirectoryLoader": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "aws_s3_directory.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/aws_s3_directory/"}, "S3FileLoader": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "aws_s3_file.md": 
"https://python.langchain.com/v0.2/docs/integrations/document_loaders/aws_s3_file/"}, "AmazonTextractPDFLoader": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "You can store your OPENAI_API_KEY in a .env file as well": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/amazon_textract/", "PDF": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/pdf/"}, "AthenaLoader": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "athena.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/athena/"}, "DocumentDBVectorSearch": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/"}, "DynamoDBChatMessageHistory": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/integrations/memory/aws_dynamodb/"}, "SageMakerCallbackHandler": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "LLM Hyperparameters": "https://python.langchain.com/v0.2/docs/integrations/callbacks/sagemaker_tracking/"}, "AmazonComprehendModerationChain": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "Define callback handlers by subclassing BaseModerationCallbackHandler": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/amazon_comprehend_chain/"}, "ChatHuggingFace": {"Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/platforms/huggingface/", "setup tools": "https://python.langchain.com/v0.2/docs/integrations/chat/huggingface/"}, "HuggingFacePipeline": {"Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/platforms/huggingface/", "huggingface_pipelines.md": "https://python.langchain.com/v0.2/docs/integrations/llms/huggingface_pipelines/", "lmformatenforcer_experimental.md": "https://python.langchain.com/v0.2/docs/integrations/llms/lmformatenforcer_experimental/", "We'll choose a regex that matches to a structured json string that looks like:": "https://python.langchain.com/v0.2/docs/integrations/llms/rellm_experimental/", "mlx_pipelines.md": "https://python.langchain.com/v0.2/docs/integrations/llms/mlx_pipelines/", "jsonformer_experimental.md": "https://python.langchain.com/v0.2/docs/integrations/llms/jsonformer_experimental/", "openvino.md": "https://python.langchain.com/v0.2/docs/integrations/llms/openvino/", "weight_only_quantization.md": "https://python.langchain.com/v0.2/docs/integrations/llms/weight_only_quantization/"}, "HuggingFaceDatasetLoader": {"Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/platforms/huggingface/", "hugging_face_dataset.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/hugging_face_dataset/"}, "load_huggingface_tool": {"Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/platforms/huggingface/", "Requires transformers>=4.29.0 and huggingface_hub>=0.14.1": "https://python.langchain.com/v0.2/docs/integrations/tools/huggingface_tools/"}, "ChatGPTLoader": {"OpenAI": "https://python.langchain.com/v0.2/docs/integrations/platforms/openai/", "chatgpt_loader.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/chatgpt_loader/"}, "DallEAPIWrapper": {"OpenAI": "https://python.langchain.com/v0.2/docs/integrations/platforms/openai/", "Needed if you would like to display images in the notebook": 
"https://python.langchain.com/v0.2/docs/integrations/tools/dalle_image_generator/"}, "OpenAIModerationChain": {"OpenAI": "https://python.langchain.com/v0.2/docs/integrations/platforms/openai/", "moderation.md": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/moderation/"}, "GoogleGenerativeAI": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "google_ai.md": "https://python.langchain.com/v0.2/docs/integrations/llms/google_ai/"}, "VertexAIModelGarden": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "google_vertex_ai_palm.md": "https://python.langchain.com/v0.2/docs/integrations/llms/google_vertex_ai_palm/"}, "ChatGoogleGenerativeAI": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "Note that each chunk may contain more than one \"token\"": "https://python.langchain.com/v0.2/docs/integrations/chat/google_generative_ai/"}, "ChatVertexAI": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "# Automatically restart kernel after installs so that your environment can access the new packages": "https://python.langchain.com/v0.2/docs/integrations/memory/google_sql_mssql/", "for running these examples in the notebook:": "https://python.langchain.com/v0.2/docs/integrations/chat/google_vertex_ai_palm/", "google_vertex_ai_palm.md": "https://python.langchain.com/v0.2/docs/integrations/llms/google_vertex_ai_palm/", "structured_output.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/structured_output/", "response_metadata.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/response_metadata/"}, "BigQueryLoader": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/.ipynb_checkpoints/google-checkpoint/", "Note that the `id` column is being returned twice, with one instance aliased as `source`": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_bigquery/"}, "GCSDirectoryLoader": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/.ipynb_checkpoints/google-checkpoint/", "google_cloud_storage_directory.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_cloud_storage_directory/"}, "GCSFileLoader": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/.ipynb_checkpoints/google-checkpoint/", "google_cloud_storage_file.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_cloud_storage_file/"}, "GoogleDriveLoader": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/.ipynb_checkpoints/google-checkpoint/", "folder_id='1yucgL9WGgWZdM1TOuKkeghlPizuzMYb5'": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_drive/"}, "GoogleSpeechToTextLoader": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "or a local file path: file_path = \"./audio.wav\"": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_speech_to_text/"}, "Blob": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "google_docai.md": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/google_docai/", "Configure the parsers that you want to use per mime-type!": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/handle_files/", "Custom Document Loader {#custom-document-loader}": 
"https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/custom/"}, "DocAIParser": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "google_docai.md": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/google_docai/"}, "GoogleTranslateTransformer": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "google_translate.md": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/google_translate/"}, "BigQueryVectorSearch": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "# Automatically restart kernel after installs so that your environment can access the new packages": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_bigquery_vector_search/"}, "VectorSearchVectorStore": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "TODO : Set values as per your requirements": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_vertex_ai_vector_search/"}, "ScaNN": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "scann.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/scann/"}, "GoogleDocumentAIWarehouseRetriever": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/"}, "GoogleCloudTextToSpeechTool": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "google_cloud_texttospeech.md": "https://python.langchain.com/v0.2/docs/integrations/tools/google_cloud_texttospeech/"}, "GoogleFinanceQueryRun": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "google_finance.md": "https://python.langchain.com/v0.2/docs/integrations/tools/google_finance/"}, "GoogleFinanceAPIWrapper": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "google_finance.md": "https://python.langchain.com/v0.2/docs/integrations/tools/google_finance/"}, "GoogleJobsQueryRun": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "use it with langchain {#use-it-with-langchain}": "https://python.langchain.com/v0.2/docs/integrations/tools/google_jobs/"}, "GoogleLensQueryRun": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "Runs google lens on an image of Danny Devito": "https://python.langchain.com/v0.2/docs/integrations/tools/google_lens/"}, "GoogleLensAPIWrapper": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "Runs google lens on an image of Danny Devito": "https://python.langchain.com/v0.2/docs/integrations/tools/google_lens/"}, "GooglePlacesTool": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "google_places.md": "https://python.langchain.com/v0.2/docs/integrations/tools/google_places/"}, "GoogleScholarQueryRun": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "google_scholar.md": "https://python.langchain.com/v0.2/docs/integrations/tools/google_scholar/"}, "GoogleScholarAPIWrapper": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "google_scholar.md": "https://python.langchain.com/v0.2/docs/integrations/tools/google_scholar/"}, "GoogleSearchAPIWrapper": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/.ipynb_checkpoints/google-checkpoint/", "google_search.md": 
"https://python.langchain.com/v0.2/docs/integrations/tools/google_search/", "System parameter in NIBittensorLLM is optional but you can set whatever you want to perform with model": "https://python.langchain.com/v0.2/docs/integrations/llms/bittensor/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/web_scraping/", "agent_with_memory.md": "https://python.langchain.com/v0.2/docs/modules/memory/agent_with_memory/"}, "GoogleTrendsQueryRun": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "google_trends.md": "https://python.langchain.com/v0.2/docs/integrations/tools/google_trends/"}, "GoogleTrendsAPIWrapper": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "google_trends.md": "https://python.langchain.com/v0.2/docs/integrations/tools/google_trends/"}, "GmailToolkit": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/integrations/toolkits/gmail/"}, "SearchApiAPIWrapper": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "searchapi.md": "https://python.langchain.com/v0.2/docs/integrations/tools/searchapi/", "SearchApi": "https://python.langchain.com/v0.2/docs/integrations/providers/searchapi/"}, "SerpAPIWrapper": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "You can create the tool to pass to an agent": "https://python.langchain.com/v0.2/docs/integrations/tools/serpapi/", "setup tools": "https://python.langchain.com/v0.2/docs/integrations/chat/mlx/", "SerpAPI": "https://python.langchain.com/v0.2/docs/integrations/providers/serpapi/", "Initialize the language model": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/examples/comparisons/", "agent_with_memory_in_db.md": "https://python.langchain.com/v0.2/docs/modules/memory/agent_with_memory_in_db/"}, "GoogleSerperAPIWrapper": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "google_serper.md": "https://python.langchain.com/v0.2/docs/integrations/tools/google_serper/", "Serper - Google Search API": "https://python.langchain.com/v0.2/docs/integrations/providers/google_serper/"}, "YouTubeSearchTool": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "youtube.md": "https://python.langchain.com/v0.2/docs/integrations/tools/youtube/"}, "YoutubeAudioLoader": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "set a flag to switch between local and remote parsing": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/youtube_audio/"}, "OpenAIWhisperParser": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "set a flag to switch between local and remote parsing": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/youtube_audio/"}, "YoutubeLoader": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "YouTube": "https://python.langchain.com/v0.2/docs/integrations/providers/youtube/", "Init the GoogleApiClient": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/youtube_transcript/", "%pip install -qU langchain langchain-community langchain-openai youtube-transcript-api pytube langchain-chroma": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/quickstart/", "%pip install -qU 
langchain langchain-openai youtube-transcript-api pytube": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/techniques/structuring/"}, "AnthropicLLM": {"Anthropic": "https://python.langchain.com/v0.2/docs/integrations/platforms/.ipynb_checkpoints/anthropic-checkpoint/", "anthropic.md": "https://python.langchain.com/v0.2/docs/integrations/llms/anthropic/"}, "MatchingEngine": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/.ipynb_checkpoints/google-checkpoint/"}, "AzureCognitiveSearchRetriever": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/.ipynb_checkpoints/microsoft-checkpoint/"}, "AIPluginTool": {"chatgpt_plugins.md": "https://python.langchain.com/v0.2/docs/integrations/tools/chatgpt_plugins/"}, "DataForSeoAPIWrapper": {"dataforseo.md": "https://python.langchain.com/v0.2/docs/integrations/tools/dataforseo/", "DataForSEO": "https://python.langchain.com/v0.2/docs/integrations/providers/dataforseo/"}, "Tool": {"dataforseo.md": "https://python.langchain.com/v0.2/docs/integrations/tools/dataforseo/", "You can create the tool to pass to an agent": "https://python.langchain.com/v0.2/docs/integrations/tools/serpapi/", "google_serper.md": "https://python.langchain.com/v0.2/docs/integrations/tools/google_serper/", "searchapi.md": "https://python.langchain.com/v0.2/docs/integrations/tools/searchapi/", "google_search.md": "https://python.langchain.com/v0.2/docs/integrations/tools/google_search/", "Adapted code from /docs/modules/agents/how_to/sharedmemory_for_tools": "https://python.langchain.com/v0.2/docs/integrations/tools/reddit_search/", "Based on ReAct Agent": "https://python.langchain.com/v0.2/docs/integrations/tools/ionic_shopping/", "Set this to your Zep server URL": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory/", "SearchApi": "https://python.langchain.com/v0.2/docs/integrations/providers/searchapi/", "Serper - Google Search API": "https://python.langchain.com/v0.2/docs/integrations/providers/google_serper/", "document_comparison_toolkit.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/document_comparison_toolkit/", "Set your environment variables using os.environ": "https://python.langchain.com/v0.2/docs/integrations/toolkits/github/", "System parameter in NIBittensorLLM is optional but you can set whatever you want to perform with model": "https://python.langchain.com/v0.2/docs/integrations/llms/bittensor/", "Pydantic compatibility": "https://python.langchain.com/v0.2/docs/guides/development/pydantic_compatibility/", "Initialize the language model": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/examples/comparisons/", "agent_with_memory_in_db.md": "https://python.langchain.com/v0.2/docs/modules/memory/agent_with_memory_in_db/", "agent_with_memory.md": "https://python.langchain.com/v0.2/docs/modules/memory/agent_with_memory/", "need to use GPT-4 here as GPT-3.5 does not understand, however hard you insist, that": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/agent_iter/"}, "ConneryService": {"Specify your Connery Runner credentials.": "https://python.langchain.com/v0.2/docs/integrations/toolkits/connery/"}, "DataheraldAPIWrapper": {"dataherald.md": "https://python.langchain.com/v0.2/docs/integrations/tools/dataherald/", "Dataherald": "https://python.langchain.com/v0.2/docs/integrations/providers/.ipynb_checkpoints/dataherald-checkpoint/"}, "SearxSearchWrapper": {"searx_search.md": 
"https://python.langchain.com/v0.2/docs/integrations/tools/searx_search/", "SearxNG Search API": "https://python.langchain.com/v0.2/docs/integrations/providers/searx/"}, "PythonREPL": {"You can create the tool to pass to an agent": "https://python.langchain.com/v0.2/docs/integrations/tools/python/", "code_writing.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/code_writing/"}, "GoogleJobsAPIWrapper": {"use it with langchain {#use-it-with-langchain}": "https://python.langchain.com/v0.2/docs/integrations/tools/google_jobs/"}, "InfobipAPIWrapper": {"How to use it inside an Agent {#how-to-use-it-inside-an-agent}": "https://python.langchain.com/v0.2/docs/integrations/tools/infobip/"}, "StructuredTool": {"How to use it inside an Agent {#how-to-use-it-inside-an-agent}": "https://python.langchain.com/v0.2/docs/integrations/tools/infobip/", "Import things that are needed generically": "https://python.langchain.com/v0.2/docs/modules/tools/custom_tools/"}, "E2BDataAnalysisTool": {"Artifacts are charts created by matplotlib when `plt.show()` is called": "https://python.langchain.com/v0.2/docs/integrations/tools/e2b_data_analysis/", "openai_assistants.md": "https://python.langchain.com/v0.2/docs/modules/agents/agent_types/openai_assistants/"}, "SQLDatabase": {"In order to build a selectable on SA's Core API, you need a table definition.": "https://python.langchain.com/v0.2/docs/integrations/tools/sql_database/", "CnosDB": "https://python.langchain.com/v0.2/docs/integrations/providers/cnosdb/", "!pip3 install rebuff openai -U": "https://python.langchain.com/v0.2/docs/integrations/providers/rebuff/", "sql_database.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/sql_database/", "Using LangSmith is recommended but not required. Uncomment below lines to use.": "https://python.langchain.com/v0.2/docs/use_cases/sql/csv/", "Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/sql/prompting/", "os.environ[\"OPENAI_API_KEY\"] = getpass.getpass()": "https://python.langchain.com/v0.2/docs/use_cases/sql/large_db/"}, "HumanInputRun": {"Answer with 'Zhu'": "https://python.langchain.com/v0.2/docs/integrations/tools/human_tools/"}, "NucliaUnderstandingAPI": {"nuclia.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/nuclia/", "Nuclia": "https://python.langchain.com/v0.2/docs/integrations/providers/nuclia/", "nuclia_transformer.md": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/nuclia_transformer/"}, "YahooFinanceNewsTool": {"How YahooFinanceNewsTool works? 
{#how-yahoofinancenewstool-works}": "https://python.langchain.com/v0.2/docs/integrations/tools/yahoo_finance_news/"}, "WikidataAPIWrapper": {"wikidata.md": "https://python.langchain.com/v0.2/docs/integrations/tools/wikidata/"}, "WikidataQueryRun": {"wikidata.md": "https://python.langchain.com/v0.2/docs/integrations/tools/wikidata/"}, "TwilioAPIWrapper": {"twilio.md": "https://python.langchain.com/v0.2/docs/integrations/tools/twilio/"}, "IFTTTWebhook": {"ifttt.md": "https://python.langchain.com/v0.2/docs/integrations/tools/ifttt/"}, "SemanticScholarQueryRun": {"start by installing semanticscholar api": "https://python.langchain.com/v0.2/docs/integrations/tools/semanticscholar/"}, "WikipediaQueryRun": {"wikipedia.md": "https://python.langchain.com/v0.2/docs/integrations/tools/wikipedia/", "index.md": "https://python.langchain.com/v0.2/docs/modules/tools/index/", "pip install wikipedia": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/intermediate_steps/", "Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/max_iterations/", "prompt_size.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/prompt_size/"}, "WikipediaAPIWrapper": {"wikipedia.md": "https://python.langchain.com/v0.2/docs/integrations/tools/wikipedia/", "Set this to your Zep server URL": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory/", "index.md": "https://python.langchain.com/v0.2/docs/modules/tools/index/", "pip install wikipedia": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/intermediate_steps/", "Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/max_iterations/", "prompt_size.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/prompt_size/"}, "AlphaVantageAPIWrapper": {"alpha_vantage.md": "https://python.langchain.com/v0.2/docs/integrations/tools/alpha_vantage/"}, "StackExchangeAPIWrapper": {"stackexchange.md": "https://python.langchain.com/v0.2/docs/integrations/tools/stackexchange/", "Stack Exchange": "https://python.langchain.com/v0.2/docs/integrations/providers/stackexchange/"}, "TextRequestsWrapper": {"Each tool wrapps a requests wrapper": "https://python.langchain.com/v0.2/docs/integrations/tools/requests/"}, "OpenWeatherMapAPIWrapper": {"openweathermap.md": "https://python.langchain.com/v0.2/docs/integrations/tools/openweathermap/", "OpenWeatherMap": "https://python.langchain.com/v0.2/docs/integrations/providers/openweathermap/"}, "get_from_env": {"Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/integrations/tools/passio_nutrition_ai/"}, "NutritionAI": {"Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/integrations/tools/passio_nutrition_ai/"}, "NutritionAIAPI": {"Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/integrations/tools/passio_nutrition_ai/"}, "PubmedQueryRun": {"pubmed.md": "https://python.langchain.com/v0.2/docs/integrations/tools/pubmed/"}, "ConversationBufferMemory": {"memorize.md": "https://python.langchain.com/v0.2/docs/integrations/tools/memorize/", "gradio_tools.md": "https://python.langchain.com/v0.2/docs/integrations/tools/gradio_tools/", "sceneXplain.md": "https://python.langchain.com/v0.2/docs/integrations/tools/sceneXplain/", "Adapted code from /docs/modules/agents/how_to/sharedmemory_for_tools": "https://python.langchain.com/v0.2/docs/integrations/tools/reddit_search/", 
"xata_chat_message_history.md": "https://python.langchain.com/v0.2/docs/integrations/memory/xata_chat_message_history/", "!pip3 install text-generation": "https://python.langchain.com/v0.2/docs/integrations/chat/llama2_chat/", "Or via the async API": "https://python.langchain.com/v0.2/docs/integrations/chat/nvidia_ai_endpoints/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_chat/", "Pip install necessary package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sap_hanavector/", "System parameter in NIBittensorLLM is optional but you can set whatever you want to perform with model": "https://python.langchain.com/v0.2/docs/integrations/llms/bittensor/", "Guardrails for Amazon Bedrock with trace": "https://python.langchain.com/v0.2/docs/integrations/llms/bedrock/", "[Beta] Memory": "https://python.langchain.com/v0.2/docs/modules/memory/.ipynb_checkpoints/index-checkpoint/", "adding_memory_chain_multiple_inputs.md": "https://python.langchain.com/v0.2/docs/modules/memory/adding_memory_chain_multiple_inputs/", "adding_memory.md": "https://python.langchain.com/v0.2/docs/modules/memory/adding_memory/", "Combined": "https://python.langchain.com/v0.2/docs/modules/memory/multiple_memory/", "Here it is by default set to \"AI\"": "https://python.langchain.com/v0.2/docs/modules/memory/conversational_customization/", "Conversation Buffer": "https://python.langchain.com/v0.2/docs/modules/memory/types/buffer/"}, "GradientLLM": {"memorize.md": "https://python.langchain.com/v0.2/docs/integrations/tools/memorize/", "Gradient": "https://python.langchain.com/v0.2/docs/integrations/providers/gradient/", "Improve the results by fine-tuning (optional) {#improve-the-results-by-fine-tuning-optional}": "https://python.langchain.com/v0.2/docs/integrations/llms/gradient/"}, "ElevenLabsText2SpeechTool": {"eleven_labs_tts.md": "https://python.langchain.com/v0.2/docs/integrations/tools/eleven_labs_tts/", "ElevenLabs": "https://python.langchain.com/v0.2/docs/integrations/providers/elevenlabs/"}, "BearlyInterpreterTool": {"Extract pdf content": "https://python.langchain.com/v0.2/docs/integrations/tools/bearly/"}, "VectorstoreIndexCreator": {"apify.md": "https://python.langchain.com/v0.2/docs/integrations/tools/apify/", "hugging_face_dataset.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/hugging_face_dataset/", "Create a vectorstore retriever from the loader": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/modern_treasury/", "image_captions.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/image_captions/", "see https://python.langchain.com/en/latest/modules/data_connection/getting_started.html for more details": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/figma/", "apify_dataset.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/apify_dataset/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/web_scraping/", "Retrievers": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/.ipynb_checkpoints/index-checkpoint/"}, "ApifyWrapper": {"apify.md": "https://python.langchain.com/v0.2/docs/integrations/tools/apify/", "Apify": "https://python.langchain.com/v0.2/docs/integrations/providers/apify/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/web_scraping/"}, "ZapierToolkit": {"get from 
https://platform.openai.com/": "https://python.langchain.com/v0.2/docs/integrations/tools/zapier/"}, "ZapierNLAWrapper": {"get from https://platform.openai.com/": "https://python.langchain.com/v0.2/docs/integrations/tools/zapier/"}, "SimpleSequentialChain": {"get from https://platform.openai.com/": "https://python.langchain.com/v0.2/docs/integrations/tools/zapier/", "LLM Hyperparameters": "https://python.langchain.com/v0.2/docs/integrations/callbacks/sagemaker_tracking/", "!pip3 install rebuff openai -U": "https://python.langchain.com/v0.2/docs/integrations/providers/rebuff/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/llms/predibase/", "Run the chain specifying only the input variable for the first chain.": "https://python.langchain.com/v0.2/docs/integrations/llms/edenai/", "magics to auto-reload external modules in case you are making changes to langchain while working on this notebook": "https://python.langchain.com/v0.2/docs/integrations/llms/replicate/"}, "TransformChain": {"get from https://platform.openai.com/": "https://python.langchain.com/v0.2/docs/integrations/tools/zapier/", "!pip3 install rebuff openai -U": "https://python.langchain.com/v0.2/docs/integrations/providers/rebuff/"}, "ZapierNLARunAction": {"get from https://platform.openai.com/": "https://python.langchain.com/v0.2/docs/integrations/tools/zapier/"}, "RivaASR": {"send data into the chain": "https://python.langchain.com/v0.2/docs/integrations/tools/nvidia_riva/"}, "RivaTTS": {"send data into the chain": "https://python.langchain.com/v0.2/docs/integrations/tools/nvidia_riva/"}, "RivaAudioEncoding": {"send data into the chain": "https://python.langchain.com/v0.2/docs/integrations/tools/nvidia_riva/"}, "AudioStream": {"send data into the chain": "https://python.langchain.com/v0.2/docs/integrations/tools/nvidia_riva/"}, "GoldenQueryAPIWrapper": {"golden_query.md": "https://python.langchain.com/v0.2/docs/integrations/tools/golden_query/", "Golden": "https://python.langchain.com/v0.2/docs/integrations/providers/golden/"}, "create_react_agent": {"arxiv.md": "https://python.langchain.com/v0.2/docs/integrations/tools/arxiv/", "Based on ReAct Agent": "https://python.langchain.com/v0.2/docs/integrations/tools/ionic_shopping/", "Streamlit": "https://python.langchain.com/v0.2/docs/integrations/callbacks/streamlit/", "Dataherald": "https://python.langchain.com/v0.2/docs/integrations/providers/.ipynb_checkpoints/dataherald-checkpoint/", "Define the neural network": "https://python.langchain.com/v0.2/docs/integrations/toolkits/python/", "Set environmental variables here": "https://python.langchain.com/v0.2/docs/integrations/toolkits/amadeus/", "System parameter in NIBittensorLLM is optional but you can set whatever you want to perform with model": "https://python.langchain.com/v0.2/docs/integrations/llms/bittensor/", "agent_with_memory_in_db.md": "https://python.langchain.com/v0.2/docs/modules/memory/agent_with_memory_in_db/", "agent_with_memory.md": "https://python.langchain.com/v0.2/docs/modules/memory/agent_with_memory/", "Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/max_iterations/"}, "ArxivAPIWrapper": {"arxiv.md": "https://python.langchain.com/v0.2/docs/integrations/tools/arxiv/"}, "DuckDuckGoSearchRun": {"ddg.md": "https://python.langchain.com/v0.2/docs/integrations/tools/ddg/", "Set your environment variables using os.environ": "https://python.langchain.com/v0.2/docs/integrations/toolkits/github/", "openai_assistants.md": 
"https://python.langchain.com/v0.2/docs/modules/agents/agent_types/openai_assistants/"}, "DuckDuckGoSearchAPIWrapper": {"ddg.md": "https://python.langchain.com/v0.2/docs/integrations/tools/ddg/"}, "SceneXplainTool": {"sceneXplain.md": "https://python.langchain.com/v0.2/docs/integrations/tools/sceneXplain/"}, "WolframAlphaAPIWrapper": {"wolfram_alpha.md": "https://python.langchain.com/v0.2/docs/integrations/tools/wolfram_alpha/", "Wolfram Alpha": "https://python.langchain.com/v0.2/docs/integrations/providers/wolfram_alpha/"}, "RunnableParallel": {"and some deps for this notebook": "https://python.langchain.com/v0.2/docs/integrations/tools/exa_search/", "QA with private data protection {#qa-with-private-data-protection}": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/qa_privacy_protection/", "import dotenv": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/streaming/", "Uncomment if you want to log to LangSmith": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/citations/", "retry.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/retry/", "> ChatPromptValue(messages=[HumanMessage(content='tell me a short joke about ice cream')])": "https://python.langchain.com/v0.2/docs/expression_language/get_started/", "The input schema of the chain is the input schema of its first part, the prompt.": "https://python.langchain.com/v0.2/docs/expression_language/interface/", "prompt_llm_parser.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/prompt_llm_parser/", "Remembers": "https://python.langchain.com/v0.2/docs/expression_language/how_to/message_history/", "Adding values to chain state {#adding-values-to-chain-state}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/assign/", "Formatting inputs & output {#formatting-inputs-output}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/parallel/", "Passing data through {#passing-data-through}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/passthrough/", "Chaining runnables {#chaining-runnables}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/sequence/"}, "ExaSearchRetriever": {"and some deps for this notebook": "https://python.langchain.com/v0.2/docs/integrations/tools/exa_search/", "exa_search.md": "https://python.langchain.com/v0.2/docs/integrations/providers/exa_search/"}, "TextContentsOptions": {"and some deps for this notebook": "https://python.langchain.com/v0.2/docs/integrations/tools/exa_search/"}, "OpenAIFunctionsAgent": {"and some deps for this notebook": "https://python.langchain.com/v0.2/docs/integrations/tools/exa_search/", "LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/callbacks/llmonitor/", "Install package": "https://python.langchain.com/v0.2/docs/integrations/toolkits/robocorp/"}, "EdenAiExplicitImageTool": {"edenai_tools.md": "https://python.langchain.com/v0.2/docs/integrations/tools/edenai_tools/", "Eden AI": "https://python.langchain.com/v0.2/docs/integrations/providers/edenai/"}, "EdenAiObjectDetectionTool": {"edenai_tools.md": "https://python.langchain.com/v0.2/docs/integrations/tools/edenai_tools/", "Eden AI": "https://python.langchain.com/v0.2/docs/integrations/providers/edenai/"}, "EdenAiParsingIDTool": {"edenai_tools.md": "https://python.langchain.com/v0.2/docs/integrations/tools/edenai_tools/", "Eden AI": 
"https://python.langchain.com/v0.2/docs/integrations/providers/edenai/"}, "EdenAiParsingInvoiceTool": {"edenai_tools.md": "https://python.langchain.com/v0.2/docs/integrations/tools/edenai_tools/", "Eden AI": "https://python.langchain.com/v0.2/docs/integrations/providers/edenai/"}, "EdenAiSpeechToTextTool": {"edenai_tools.md": "https://python.langchain.com/v0.2/docs/integrations/tools/edenai_tools/", "Eden AI": "https://python.langchain.com/v0.2/docs/integrations/providers/edenai/"}, "EdenAiTextModerationTool": {"edenai_tools.md": "https://python.langchain.com/v0.2/docs/integrations/tools/edenai_tools/", "Eden AI": "https://python.langchain.com/v0.2/docs/integrations/providers/edenai/"}, "EdenAiTextToSpeechTool": {"edenai_tools.md": "https://python.langchain.com/v0.2/docs/integrations/tools/edenai_tools/", "Eden AI": "https://python.langchain.com/v0.2/docs/integrations/providers/edenai/"}, "EdenAI": {"edenai_tools.md": "https://python.langchain.com/v0.2/docs/integrations/tools/edenai_tools/", "Eden AI": "https://python.langchain.com/v0.2/docs/integrations/providers/edenai/", "Run the chain specifying only the input variable for the first chain.": "https://python.langchain.com/v0.2/docs/integrations/llms/edenai/"}, "RedditSearchRun": {"Adapted code from /docs/modules/agents/how_to/sharedmemory_for_tools": "https://python.langchain.com/v0.2/docs/integrations/tools/reddit_search/"}, "RedditSearchAPIWrapper": {"Adapted code from /docs/modules/agents/how_to/sharedmemory_for_tools": "https://python.langchain.com/v0.2/docs/integrations/tools/reddit_search/"}, "RedditSearchSchema": {"Adapted code from /docs/modules/agents/how_to/sharedmemory_for_tools": "https://python.langchain.com/v0.2/docs/integrations/tools/reddit_search/"}, "StructuredChatAgent": {"Adapted code from /docs/modules/agents/how_to/sharedmemory_for_tools": "https://python.langchain.com/v0.2/docs/integrations/tools/reddit_search/"}, "ReadOnlySharedMemory": {"Adapted code from /docs/modules/agents/how_to/sharedmemory_for_tools": "https://python.langchain.com/v0.2/docs/integrations/tools/reddit_search/"}, "YouSearchTool": {"For use in Chaining section": "https://python.langchain.com/v0.2/docs/integrations/tools/you/"}, "ShellTool": {"bash.md": "https://python.langchain.com/v0.2/docs/integrations/tools/bash/"}, "PolygonAggregates": {"Get the last quote for ticker": "https://python.langchain.com/v0.2/docs/integrations/tools/polygon/"}, "PolygonFinancials": {"Get the last quote for ticker": "https://python.langchain.com/v0.2/docs/integrations/tools/polygon/"}, "PolygonLastQuote": {"Get the last quote for ticker": "https://python.langchain.com/v0.2/docs/integrations/tools/polygon/"}, "PolygonTickerNews": {"Get the last quote for ticker": "https://python.langchain.com/v0.2/docs/integrations/tools/polygon/"}, "PolygonAPIWrapper": {"Get the last quote for ticker": "https://python.langchain.com/v0.2/docs/integrations/tools/polygon/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/integrations/toolkits/polygon/"}, "PolygonAggregatesSchema": {"Get the last quote for ticker": "https://python.langchain.com/v0.2/docs/integrations/tools/polygon/"}, "FileManagementToolkit": {"We'll make a temporary directory to avoid clutter": "https://python.langchain.com/v0.2/docs/integrations/tools/filesystem/"}, "BraveSearch": {"brave_search.md": "https://python.langchain.com/v0.2/docs/integrations/tools/brave_search/", "Brave Search": "https://python.langchain.com/v0.2/docs/integrations/providers/brave_search/"}, 
"RedisChatMessageHistory": {"redis_chat_message_history.md": "https://python.langchain.com/v0.2/docs/integrations/memory/redis_chat_message_history/", "agent_with_memory_in_db.md": "https://python.langchain.com/v0.2/docs/modules/memory/agent_with_memory_in_db/", "Remembers": "https://python.langchain.com/v0.2/docs/expression_language/how_to/message_history/"}, "RunnableWithMessageHistory": {"redis_chat_message_history.md": "https://python.langchain.com/v0.2/docs/integrations/memory/redis_chat_message_history/", "# Automatically restart kernel after installs so that your environment can access the new packages": "https://python.langchain.com/v0.2/docs/integrations/memory/google_sql_mssql/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/integrations/memory/sqlite/", "Optionally, specify your own session_state key for storing messages": "https://python.langchain.com/v0.2/docs/integrations/memory/streamlit_chat_message_history/", "copy from tidb cloud console": "https://python.langchain.com/v0.2/docs/integrations/memory/tidb_chat_message_history/", "import dotenv": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/chat_history/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/chatbots/memory_management/", "agent_with_memory_in_db.md": "https://python.langchain.com/v0.2/docs/modules/memory/agent_with_memory_in_db/", "agent_with_memory.md": "https://python.langchain.com/v0.2/docs/modules/memory/agent_with_memory/", "Quickstart {#quickstart}": "https://python.langchain.com/v0.2/docs/modules/agents/quick_start/", "Remembers": "https://python.langchain.com/v0.2/docs/expression_language/how_to/message_history/"}, "ElasticsearchChatMessageHistory": {"If using Elastic Cloud:": "https://python.langchain.com/v0.2/docs/integrations/memory/elasticsearch_chat_message_history/", "Elasticsearch": "https://python.langchain.com/v0.2/docs/integrations/providers/elasticsearch/"}, "UpstashRedisChatMessageHistory": {"upstash_redis_chat_message_history.md": "https://python.langchain.com/v0.2/docs/integrations/memory/upstash_redis_chat_message_history/", "Upstash Redis": "https://python.langchain.com/v0.2/docs/integrations/providers/upstash/"}, "SingleStoreDBChatMessageHistory": {"singlestoredb_chat_message_history.md": "https://python.langchain.com/v0.2/docs/integrations/memory/singlestoredb_chat_message_history/", "SingleStoreDB": "https://python.langchain.com/v0.2/docs/integrations/providers/singlestoredb/"}, "PostgresChatMessageHistory": {"postgres_chat_message_history.md": "https://python.langchain.com/v0.2/docs/integrations/memory/postgres_chat_message_history/"}, "MomentoChatMessageHistory": {"momento_chat_message_history.md": "https://python.langchain.com/v0.2/docs/integrations/memory/momento_chat_message_history/", "Momento": "https://python.langchain.com/v0.2/docs/integrations/providers/momento/"}, "XataChatMessageHistory": {"xata_chat_message_history.md": "https://python.langchain.com/v0.2/docs/integrations/memory/xata_chat_message_history/", "Xata": "https://python.langchain.com/v0.2/docs/integrations/providers/xata/"}, "XataVectorStore": {"xata_chat_message_history.md": "https://python.langchain.com/v0.2/docs/integrations/memory/xata_chat_message_history/", "xata.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/xata/"}, "create_retriever_tool": {"xata_chat_message_history.md": 
"https://python.langchain.com/v0.2/docs/integrations/memory/xata_chat_message_history/", "Quickstart": "https://python.langchain.com/v0.2/docs/get_started/quickstart/", "conversational_retrieval_agents.md": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/conversational_retrieval_agents/", "Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/sql/agents/", "Quickstart {#quickstart}": "https://python.langchain.com/v0.2/docs/modules/agents/quick_start/", "Load in document to retrieve over": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/agent_structured/"}, "CassandraChatMessageHistory": {"cassandra_chat_message_history.md": "https://python.langchain.com/v0.2/docs/integrations/memory/cassandra_chat_message_history/", "Cassandra": "https://python.langchain.com/v0.2/docs/integrations/providers/cassandra/"}, "SQLChatMessageHistory": {"os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/integrations/memory/sqlite/", "SQLite": "https://python.langchain.com/v0.2/docs/integrations/providers/sqlite/"}, "MotorheadMemory": {"loads previous state from Mot\u00f6rhead \ud83e\udd18": "https://python.langchain.com/v0.2/docs/integrations/memory/motorhead_memory/", "Mot\u00f6rhead": "https://python.langchain.com/v0.2/docs/integrations/providers/motorhead/"}, "AstraDBChatMessageHistory": {"astradb_chat_message_history.md": "https://python.langchain.com/v0.2/docs/integrations/memory/astradb_chat_message_history/", "Astra DB": "https://python.langchain.com/v0.2/docs/integrations/providers/astradb/"}, "StreamlitChatMessageHistory": {"Optionally, specify your own session_state key for storing messages": "https://python.langchain.com/v0.2/docs/integrations/memory/streamlit_chat_message_history/", "Streamlit": "https://python.langchain.com/v0.2/docs/integrations/providers/streamlit/"}, "Neo4jChatMessageHistory": {"neo4j_chat_message_history.md": "https://python.langchain.com/v0.2/docs/integrations/memory/neo4j_chat_message_history/"}, "TiDBChatMessageHistory": {"copy from tidb cloud console": "https://python.langchain.com/v0.2/docs/integrations/memory/tidb_chat_message_history/", "TiDB": "https://python.langchain.com/v0.2/docs/integrations/providers/tidb/"}, "RocksetChatMessageHistory": {"rockset_chat_message_history.md": "https://python.langchain.com/v0.2/docs/integrations/memory/rockset_chat_message_history/", "Rockset": "https://python.langchain.com/v0.2/docs/integrations/providers/rockset/"}, "HuggingFaceTextGenInference": {"setup tools": "https://python.langchain.com/v0.2/docs/integrations/chat/huggingface/", "!pip3 install text-generation": "https://python.langchain.com/v0.2/docs/integrations/chat/llama2_chat/"}, "HuggingFaceEndpoint": {"setup tools": "https://python.langchain.com/v0.2/docs/integrations/chat/huggingface/", "get a token: https://huggingface.co/docs/api-inference/quicktour#get-your-api-token": "https://python.langchain.com/v0.2/docs/integrations/llms/huggingface_endpoint/"}, "HuggingFaceHub": {"setup tools": "https://python.langchain.com/v0.2/docs/integrations/chat/huggingface/", "Set environmental variables here": "https://python.langchain.com/v0.2/docs/integrations/toolkits/amadeus/", "Define callback handlers by subclassing BaseModerationCallbackHandler": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/amazon_comprehend_chain/"}, "format_log_to_str": {"setup tools": "https://python.langchain.com/v0.2/docs/integrations/chat/mlx/"}, 
"ReActJsonSingleInputOutputParser": {"setup tools": "https://python.langchain.com/v0.2/docs/integrations/chat/mlx/", "Set environmental variables here": "https://python.langchain.com/v0.2/docs/integrations/toolkits/amadeus/"}, "render_text_description": {"setup tools": "https://python.langchain.com/v0.2/docs/integrations/chat/mlx/", "If you'd like to use LangSmith, uncomment the below:": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/prompting/"}, "AzureMLChatOnlineEndpoint": {"azureml_chat_endpoint.md": "https://python.langchain.com/v0.2/docs/integrations/chat/azureml_chat_endpoint/"}, "AzureMLEndpointApiType": {"azureml_chat_endpoint.md": "https://python.langchain.com/v0.2/docs/integrations/chat/azureml_chat_endpoint/", "azure_ml.md": "https://python.langchain.com/v0.2/docs/integrations/llms/azure_ml/"}, "CustomOpenAIChatContentFormatter": {"azureml_chat_endpoint.md": "https://python.langchain.com/v0.2/docs/integrations/chat/azureml_chat_endpoint/"}, "ChatKinetica": {"Install Langchain community and core packages": "https://python.langchain.com/v0.2/docs/integrations/chat/kinetica/", "Kinetica": "https://python.langchain.com/v0.2/docs/integrations/providers/kinetica/"}, "KineticaSqlOutputParser": {"Install Langchain community and core packages": "https://python.langchain.com/v0.2/docs/integrations/chat/kinetica/"}, "KineticaSqlResponse": {"Install Langchain community and core packages": "https://python.langchain.com/v0.2/docs/integrations/chat/kinetica/"}, "PaiEasChatEndpoint": {"alibaba_cloud_pai_eas.md": "https://python.langchain.com/v0.2/docs/integrations/chat/alibaba_cloud_pai_eas/", "Alibaba Cloud": "https://python.langchain.com/v0.2/docs/integrations/providers/alibaba_cloud/"}, "ChatFireworks": {"Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/chat/fireworks/", "structured_output.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/structured_output/", "response_metadata.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/response_metadata/"}, "ChatOctoAI": {"octoai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/octoai/"}, "ChatDeepInfra": {"get a new token: https://deepinfra.com/login?from=%2Fdash": "https://python.langchain.com/v0.2/docs/integrations/chat/deepinfra/", "DeepInfra": "https://python.langchain.com/v0.2/docs/integrations/providers/deepinfra/"}, "StreamingStdOutCallbackHandler": {"get a new token: https://deepinfra.com/login?from=%2Fdash": "https://python.langchain.com/v0.2/docs/integrations/chat/deepinfra/", "litellm.md": "https://python.langchain.com/v0.2/docs/integrations/chat/litellm/", "Let\u2019s try out LLAMA model offered on EverlyAI Hosted Endpoints {#lets-try-out-llama-model-offered-on-everlyai-hosted-endpoints}": "https://python.langchain.com/v0.2/docs/integrations/chat/everlyai/", "gpt_router.md": "https://python.langchain.com/v0.2/docs/integrations/chat/gpt_router/", "litellm_router.md": "https://python.langchain.com/v0.2/docs/integrations/chat/litellm_router/", "zhipuai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/zhipuai/", "yuan2.md": "https://python.langchain.com/v0.2/docs/integrations/chat/yuan2/", "GPT4All": "https://python.langchain.com/v0.2/docs/integrations/providers/gpt4all/", "arthur_tracking.md": "https://python.langchain.com/v0.2/docs/integrations/providers/arthur_tracking/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_chat/", "textgen.md": 
"https://python.langchain.com/v0.2/docs/integrations/llms/textgen/", "Callbacks support token-wise streaming": "https://python.langchain.com/v0.2/docs/integrations/llms/llamacpp/", "Guardrails for Amazon Bedrock with trace": "https://python.langchain.com/v0.2/docs/integrations/llms/bedrock/", "Note importing TitanTakeoffPro instead of TitanTakeoff will work as well both use same object under the hood": "https://python.langchain.com/v0.2/docs/integrations/llms/titan_takeoff/", "Run the chain specifying only the input variable for the first chain.": "https://python.langchain.com/v0.2/docs/integrations/llms/edenai/", "ctransformers.md": "https://python.langchain.com/v0.2/docs/integrations/llms/ctransformers/", "get a token: https://huggingface.co/docs/api-inference/quicktour#get-your-api-token": "https://python.langchain.com/v0.2/docs/integrations/llms/huggingface_endpoint/", "magics to auto-reload external modules in case you are making changes to langchain while working on this notebook": "https://python.langchain.com/v0.2/docs/integrations/llms/replicate/", "Download a llamafile from HuggingFace": "https://python.langchain.com/v0.2/docs/guides/development/local_llms/"}, "ToolsOutputParser": {"open ../../../static/img/brand/wordmark.png as base64 str": "https://python.langchain.com/v0.2/docs/integrations/chat/anthropic/"}, "ChatGroq": {"groq.md": "https://python.langchain.com/v0.2/docs/integrations/chat/groq/", "structured_output.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/structured_output/", "response_metadata.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/response_metadata/"}, "ChatLiteLLM": {"litellm.md": "https://python.langchain.com/v0.2/docs/integrations/chat/litellm/"}, "CallbackManager": {"litellm.md": "https://python.langchain.com/v0.2/docs/integrations/chat/litellm/", "gpt_router.md": "https://python.langchain.com/v0.2/docs/integrations/chat/gpt_router/", "litellm_router.md": "https://python.langchain.com/v0.2/docs/integrations/chat/litellm_router/", "zhipuai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/zhipuai/", "Callbacks support token-wise streaming": "https://python.langchain.com/v0.2/docs/integrations/llms/llamacpp/", "Note importing TitanTakeoffPro instead of TitanTakeoff will work as well both use same object under the hood": "https://python.langchain.com/v0.2/docs/integrations/llms/titan_takeoff/", "Download a llamafile from HuggingFace": "https://python.langchain.com/v0.2/docs/guides/development/local_llms/"}, "LlamaEdgeChatService": {"service url": "https://python.langchain.com/v0.2/docs/integrations/chat/llama_edge/"}, "HarmBlockThreshold": {"Note that each chunk may contain more than one \"token\"": "https://python.langchain.com/v0.2/docs/integrations/chat/google_generative_ai/", "for running these examples in the notebook:": "https://python.langchain.com/v0.2/docs/integrations/chat/google_vertex_ai_palm/", "google_ai.md": "https://python.langchain.com/v0.2/docs/integrations/llms/google_ai/"}, "HarmCategory": {"Note that each chunk may contain more than one \"token\"": "https://python.langchain.com/v0.2/docs/integrations/chat/google_generative_ai/", "for running these examples in the notebook:": "https://python.langchain.com/v0.2/docs/integrations/chat/google_vertex_ai_palm/", "google_ai.md": "https://python.langchain.com/v0.2/docs/integrations/llms/google_ai/"}, "OllamaFunctions": {"Schema": "https://python.langchain.com/v0.2/docs/integrations/chat/ollama_functions/", "Ollama": 
"https://python.langchain.com/v0.2/docs/integrations/providers/ollama/"}, "create_extraction_chain": {"Schema": "https://python.langchain.com/v0.2/docs/integrations/chat/ollama_functions/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/web_scraping/"}, "VolcEngineMaasChat": {"Install the package": "https://python.langchain.com/v0.2/docs/integrations/chat/volcengine_maas/"}, "ChatLlamaAPI": {"Replace 'Your_API_Token' with your actual API token": "https://python.langchain.com/v0.2/docs/integrations/chat/llama_api/"}, "create_tagging_chain": {"Replace 'Your_API_Token' with your actual API token": "https://python.langchain.com/v0.2/docs/integrations/chat/llama_api/"}, "ChatKonko": {"Konko {#konko}": "https://python.langchain.com/v0.2/docs/integrations/chat/konko/"}, "create_structured_runnable": {"for running these examples in the notebook:": "https://python.langchain.com/v0.2/docs/integrations/chat/google_vertex_ai_palm/"}, "MLXPipeline": {"setup tools": "https://python.langchain.com/v0.2/docs/integrations/chat/mlx/", "mlx_pipelines.md": "https://python.langchain.com/v0.2/docs/integrations/llms/mlx_pipelines/"}, "ChatMLX": {"setup tools": "https://python.langchain.com/v0.2/docs/integrations/chat/mlx/"}, "GigaChat": {"gigachat.md": "https://python.langchain.com/v0.2/docs/integrations/llms/gigachat/", "Salute Devices": "https://python.langchain.com/v0.2/docs/integrations/providers/salute_devices/"}, "JinaChat": {"get a chat completion from the formatted messages": "https://python.langchain.com/v0.2/docs/integrations/chat/jinachat/"}, "SystemMessagePromptTemplate": {"get a chat completion from the formatted messages": "https://python.langchain.com/v0.2/docs/integrations/chat/vllm/", "Install all needed libraries": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/yellowbrick/", "see https://python.langchain.com/en/latest/modules/data_connection/getting_started.html for more details": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/figma/", "Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/sql/agents/", "[Beta] Memory": "https://python.langchain.com/v0.2/docs/modules/memory/.ipynb_checkpoints/index-checkpoint/", "Prompts": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/.ipynb_checkpoints/prompts-checkpoint/"}, "ChatOllama": {"LangChain supports many other chat models. 
Here, we're using Ollama": "https://python.langchain.com/v0.2/docs/integrations/chat/ollama/", "Ollama": "https://python.langchain.com/v0.2/docs/integrations/providers/ollama/", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/v0.2/docs/use_cases/code_understanding/", "Quickstart": "https://python.langchain.com/v0.2/docs/modules/model_io/.ipynb_checkpoints/quick_start-checkpoint/", "Model I/O": "https://python.langchain.com/v0.2/docs/modules/model_io/index/"}, "get_openai_callback": {"azure_chat_openai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/azure_chat_openai/", "You can kick off concurrent runs from within the context manager": "https://python.langchain.com/v0.2/docs/modules/callbacks/token_counting/", "!pip install -qU langchain-openai": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/token_usage_tracking/", "token_usage_tracking.md": "https://python.langchain.com/v0.2/docs/modules/model_io/llms/token_usage_tracking/", "Run custom functions {#run-custom-functions}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/functions/"}, "ChatEverlyAI": {"Let\u2019s try out LLAMA model offered on EverlyAI Hosted Endpoints {#lets-try-out-llama-model-offered-on-everlyai-hosted-endpoints}": "https://python.langchain.com/v0.2/docs/integrations/chat/everlyai/"}, "GPTRouter": {"gpt_router.md": "https://python.langchain.com/v0.2/docs/integrations/chat/gpt_router/"}, "GPTRouterModel": {"gpt_router.md": "https://python.langchain.com/v0.2/docs/integrations/chat/gpt_router/"}, "ChatLiteLLMRouter": {"litellm_router.md": "https://python.langchain.com/v0.2/docs/integrations/chat/litellm_router/"}, "ChatFriendli": {"friendli.md": "https://python.langchain.com/v0.2/docs/integrations/chat/friendli/"}, "ChatMistralAI": {"If api_key is not passed, default behavior is to use the `MISTRAL_API_KEY` environment variable.": "https://python.langchain.com/v0.2/docs/integrations/chat/mistralai/", "mistralai.md": "https://python.langchain.com/v0.2/docs/integrations/providers/mistralai/", "Install a model capable of tool calling": "https://python.langchain.com/v0.2/docs/use_cases/extraction/quickstart/", "structured_output.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/structured_output/", "response_metadata.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/response_metadata/"}, "ChatZhipuAI": {"zhipuai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/zhipuai/"}, "create_json_chat_agent": {"zhipuai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/zhipuai/", "Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/agent_types/json_agent/"}, "ChatBaichuan": {"baichuan.md": "https://python.langchain.com/v0.2/docs/integrations/chat/baichuan/", "Baichuan": "https://python.langchain.com/v0.2/docs/integrations/providers/baichuan/"}, "Llama2Chat": {"!pip3 install text-generation": "https://python.langchain.com/v0.2/docs/integrations/chat/llama2_chat/"}, "LlamaCpp": {"!pip3 install text-generation": "https://python.langchain.com/v0.2/docs/integrations/chat/llama2_chat/", "Llama.cpp": "https://python.langchain.com/v0.2/docs/integrations/providers/llamacpp/", "Callbacks support token-wise streaming": "https://python.langchain.com/v0.2/docs/integrations/llms/llamacpp/", "Download a llamafile from HuggingFace": "https://python.langchain.com/v0.2/docs/guides/development/local_llms/", "Make sure the model path is correct for your 
system!": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/local_retrieval_qa/"}, "QianfanChatEndpoint": {"baidu_qianfan_endpoint.md": "https://python.langchain.com/v0.2/docs/integrations/chat/baidu_qianfan_endpoint/", "ernie.md": "https://python.langchain.com/v0.2/docs/integrations/chat/ernie/", "Baidu": "https://python.langchain.com/v0.2/docs/integrations/providers/baidu/"}, "ChatEdenAI": {"edenai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/edenai/", "Eden AI": "https://python.langchain.com/v0.2/docs/integrations/providers/edenai/"}, "ErnieBotChat": {"ernie.md": "https://python.langchain.com/v0.2/docs/integrations/chat/ernie/"}, "ChatHunyuan": {"tencent_hunyuan.md": "https://python.langchain.com/v0.2/docs/integrations/chat/tencent_hunyuan/", "Tencent": "https://python.langchain.com/v0.2/docs/integrations/providers/tencent/"}, "MiniMaxChat": {"minimax.md": "https://python.langchain.com/v0.2/docs/integrations/chat/minimax/", "Minimax": "https://python.langchain.com/v0.2/docs/integrations/providers/minimax/"}, "ChatYuan2": {"yuan2.md": "https://python.langchain.com/v0.2/docs/integrations/chat/yuan2/"}, "ChatTongyi": {"Install the package": "https://python.langchain.com/v0.2/docs/integrations/chat/tongyi/", "Alibaba Cloud": "https://python.langchain.com/v0.2/docs/integrations/providers/alibaba_cloud/"}, "PromptLayerChatOpenAI": {"promptlayer_chatopenai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/promptlayer_chatopenai/", "PromptLayer": "https://python.langchain.com/v0.2/docs/integrations/providers/promptlayer/"}, "ChatSparkLLM": {"sparkllm.md": "https://python.langchain.com/v0.2/docs/integrations/chat/sparkllm/"}, "MoonshotChat": {"Generate your api key from: https://platform.moonshot.cn/console/api-keys": "https://python.langchain.com/v0.2/docs/integrations/chat/moonshot/"}, "ChatDappierAI": {"dappier.md": "https://python.langchain.com/v0.2/docs/integrations/chat/dappier/"}, "ChatMaritalk": {"Loading the COMVEST 2024 notice": "https://python.langchain.com/v0.2/docs/integrations/chat/maritalk/"}, "OnlinePDFLoader": {"Loading the COMVEST 2024 notice": "https://python.langchain.com/v0.2/docs/integrations/chat/maritalk/", "PDF": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/pdf/"}, "load_qa_chain": {"Loading the COMVEST 2024 notice": "https://python.langchain.com/v0.2/docs/integrations/chat/maritalk/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_chat/", "You can store your OPENAI_API_KEY in a .env file as well": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/amazon_textract/", "sagemaker.md": "https://python.langchain.com/v0.2/docs/integrations/llms/sagemaker/", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/v0.2/docs/use_cases/code_understanding/", "adding_memory_chain_multiple_inputs.md": "https://python.langchain.com/v0.2/docs/modules/memory/adding_memory_chain_multiple_inputs/"}, "ChatPremAI": {"First step is to set up the env variable.": "https://python.langchain.com/v0.2/docs/integrations/chat/premai/", "PremAI": "https://python.langchain.com/v0.2/docs/integrations/providers/premai/"}, "ChatAnyscale": {"Let\u2019s try out each model offered on Anyscale Endpoints {#lets-try-out-each-model-offered-on-anyscale-endpoints}": "https://python.langchain.com/v0.2/docs/integrations/chat/anyscale/", "Anyscale": "https://python.langchain.com/v0.2/docs/integrations/providers/anyscale/"}, 
"ChatYandexGPT": {"yandex.md": "https://python.langchain.com/v0.2/docs/integrations/chat/yandex/", "Yandex": "https://python.langchain.com/v0.2/docs/integrations/providers/yandex/"}, "ChatPerplexity": {"perplexity.md": "https://python.langchain.com/v0.2/docs/integrations/chat/perplexity/"}, "ChatAnthropicTools": {"anthropic_functions.md": "https://python.langchain.com/v0.2/docs/integrations/chat/anthropic_functions/"}, "ChatMessage": {"Or via the async API": "https://python.langchain.com/v0.2/docs/integrations/chat/nvidia_ai_endpoints/"}, "ConversationChain": {"Or via the async API": "https://python.langchain.com/v0.2/docs/integrations/chat/nvidia_ai_endpoints/", "Guardrails for Amazon Bedrock with trace": "https://python.langchain.com/v0.2/docs/integrations/llms/bedrock/", "!python -m spacy download en_core_web_lg": "https://python.langchain.com/v0.2/docs/modules/memory/custom_memory/", "Combined": "https://python.langchain.com/v0.2/docs/modules/memory/multiple_memory/", "Here it is by default set to \"AI\"": "https://python.langchain.com/v0.2/docs/modules/memory/conversational_customization/", "kg.md": "https://python.langchain.com/v0.2/docs/modules/memory/types/kg/", "We can see here that the buffer is updated": "https://python.langchain.com/v0.2/docs/modules/memory/types/token_buffer/", "Entity": "https://python.langchain.com/v0.2/docs/modules/memory/types/entity_summary_memory/", "Conversation Summary": "https://python.langchain.com/v0.2/docs/modules/memory/types/summary/", "Backed by a Vector Store": "https://python.langchain.com/v0.2/docs/modules/memory/types/vectorstore_retriever_memory/", "Conversation Buffer Window": "https://python.langchain.com/v0.2/docs/modules/memory/types/buffer_window/", "Conversation Buffer": "https://python.langchain.com/v0.2/docs/modules/memory/types/buffer/", "We can see here that there is a summary of the conversation and then some previous interactions": "https://python.langchain.com/v0.2/docs/modules/memory/types/summary_buffer/"}, "DeepEvalCallbackHandler": {"Here we want to make sure the answer is minimally relevant": "https://python.langchain.com/v0.2/docs/integrations/callbacks/confident/", "Confident AI": "https://python.langchain.com/v0.2/docs/integrations/providers/confident/"}, "LLMonitorCallbackHandler": {"LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/providers/llmonitor/"}, "identify": {"LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/callbacks/llmonitor/"}, "ContextCallbackHandler": {"context.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/context/", "Context": "https://python.langchain.com/v0.2/docs/integrations/providers/context/"}, "FiddlerCallbackHandler": {"Fiddler project and model names, used for model registration": "https://python.langchain.com/v0.2/docs/integrations/callbacks/fiddler/", "Fiddler": "https://python.langchain.com/v0.2/docs/integrations/providers/fiddler/"}, "FewShotChatMessagePromptTemplate": {"Fiddler project and model names, used for model registration": "https://python.langchain.com/v0.2/docs/integrations/callbacks/fiddler/", "This is a prompt template used to format each individual example.": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/few_shot_examples_chat/"}, "LabelStudioCallbackHandler": {"labelstudio.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/labelstudio/", "Label Studio": "https://python.langchain.com/v0.2/docs/integrations/providers/labelstudio/"}, "CometTracer": {"Connect to Comet if no API Key is set": 
"https://python.langchain.com/v0.2/docs/integrations/callbacks/comet_tracing/", "os.environ[\"OPENAI_ORGANIZATION\"] = \"...\"": "https://python.langchain.com/v0.2/docs/integrations/providers/comet_tracking/"}, "ArgillaCallbackHandler": {"argilla.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/argilla/", "Argilla": "https://python.langchain.com/v0.2/docs/integrations/providers/argilla/"}, "StdOutCallbackHandler": {"argilla.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/argilla/", "os.environ[\"OPENAI_ORGANIZATION\"] = \"...\"": "https://python.langchain.com/v0.2/docs/integrations/providers/comet_tracking/", "scenario 1 - LLM": "https://python.langchain.com/v0.2/docs/integrations/providers/aim_tracking/", "os.environ[\"OPENAI_API_KEY\"] = \"\"": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracking/", "Setup and use the ClearML Callback": "https://python.langchain.com/v0.2/docs/integrations/providers/clearml_tracking/", "install the opaqueprompts and langchain packages": "https://python.langchain.com/v0.2/docs/integrations/llms/opaqueprompts/", "Callbacks": "https://python.langchain.com/v0.2/docs/modules/callbacks/index/", "this chain will both print to stdout (because verbose=True) and write to 'output.log'": "https://python.langchain.com/v0.2/docs/modules/callbacks/filecallbackhandler/"}, "PromptLayerCallbackHandler": {"promptlayer.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/promptlayer/", "PromptLayer": "https://python.langchain.com/v0.2/docs/integrations/providers/promptlayer/"}, "GPT4All": {"promptlayer.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/promptlayer/", "GPT4All": "https://python.langchain.com/v0.2/docs/integrations/providers/gpt4all/", "Callbacks support token-wise streaming": "https://python.langchain.com/v0.2/docs/integrations/llms/gpt4all/", "Download a llamafile from HuggingFace": "https://python.langchain.com/v0.2/docs/guides/development/local_llms/", "Make sure the model path is correct for your system!": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/local_retrieval_qa/"}, "StreamlitCallbackHandler": {"Streamlit": "https://python.langchain.com/v0.2/docs/integrations/providers/streamlit/", "GPT4All": "https://python.langchain.com/v0.2/docs/integrations/providers/gpt4all/"}, "MultiQueryRetriever": {"1. Vanilla RAG {#vanilla-rag-1}": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_summary/", "Build a sample vectorDB": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/MultiQueryRetriever/"}, "UpTrainCallbackHandler": {"1. 
Vanilla RAG {#vanilla-rag-1}": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "UpTrain": "https://python.langchain.com/v0.2/docs/integrations/providers/uptrain/"}, "TrubricsCallbackHandler": {"trubrics.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/trubrics/", "Trubrics": "https://python.langchain.com/v0.2/docs/integrations/providers/trubrics/"}, "InfinoCallbackHandler": {"Install necessary dependencies.": "https://python.langchain.com/v0.2/docs/integrations/callbacks/infino/", "Infino": "https://python.langchain.com/v0.2/docs/integrations/providers/infino/"}, "load_summarize_chain": {"Install necessary dependencies.": "https://python.langchain.com/v0.2/docs/integrations/callbacks/infino/", "see https://python.langchain.com/v0.2/docs/use_cases/summarization for more details": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/larksuite/", "To make the caching really obvious, lets use a slower model.": "https://python.langchain.com/v0.2/docs/integrations/llms/llm_caching/", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/v0.2/docs/use_cases/summarization/"}, "FigmaFileLoader": {"Figma": "https://python.langchain.com/v0.2/docs/integrations/providers/figma/", "see https://python.langchain.com/en/latest/modules/data_connection/getting_started.html for more details": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/figma/"}, "Baseten": {"Baseten": "https://python.langchain.com/v0.2/docs/integrations/providers/baseten/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/llms/baseten/"}, "WeatherDataLoader": {"Weather": "https://python.langchain.com/v0.2/docs/integrations/providers/weather/", "Set API key either by passing it in to constructor directly": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/weather/"}, "Tair": {"Tair": "https://python.langchain.com/v0.2/docs/integrations/providers/tair/", "Alibaba Cloud": "https://python.langchain.com/v0.2/docs/integrations/providers/alibaba_cloud/", "drop first if index already exists": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tair/"}, "CollegeConfidentialLoader": {"College Confidential": "https://python.langchain.com/v0.2/docs/integrations/providers/college_confidential/", "college_confidential.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/college_confidential/"}, "RWKV": {"RWKV-4": "https://python.langchain.com/v0.2/docs/integrations/providers/rwkv/"}, "LakeFSLoader": {"lakeFS": "https://python.langchain.com/v0.2/docs/integrations/providers/lakefs/", "lakefs.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/lakefs/"}, "FaunaLoader": {"Fauna": "https://python.langchain.com/v0.2/docs/integrations/providers/fauna/", "fauna.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/fauna/"}, "OCIGenAI": {"Oracle Cloud Infrastructure (OCI)": "https://python.langchain.com/v0.2/docs/integrations/providers/oci/", "use default authN method API-key": "https://python.langchain.com/v0.2/docs/integrations/llms/oci_generative_ai/"}, "OCIModelDeploymentVLLM": {"Oracle Cloud Infrastructure (OCI)": "https://python.langchain.com/v0.2/docs/integrations/providers/oci/", "Set authentication through ads": "https://python.langchain.com/v0.2/docs/integrations/llms/oci_model_deployment_endpoint/"}, "OCIModelDeploymentTGI": {"Oracle Cloud Infrastructure (OCI)": 
"https://python.langchain.com/v0.2/docs/integrations/providers/oci/", "Set authentication through ads": "https://python.langchain.com/v0.2/docs/integrations/llms/oci_model_deployment_endpoint/"}, "Lantern": {"Lantern": "https://python.langchain.com/v0.2/docs/integrations/providers/lantern/", "Pip install necessary package {#pip-install-necessary-package}": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/lantern/"}, "SQLiteCache": {"From LangChain, import standard modules for prompting.": "https://python.langchain.com/v0.2/docs/integrations/providers/dspy/", "To make the caching really obvious, lets use a slower model.": "https://python.langchain.com/v0.2/docs/modules/model_io/llms/llm_caching/", "": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/chat_model_caching/"}, "set_llm_cache": {"From LangChain, import standard modules for prompting.": "https://python.langchain.com/v0.2/docs/integrations/providers/dspy/", "MongoDB Atlas": "https://python.langchain.com/v0.2/docs/integrations/providers/mongodb_atlas/", "Astra DB": "https://python.langchain.com/v0.2/docs/integrations/providers/astradb/", "Redis": "https://python.langchain.com/v0.2/docs/integrations/providers/redis/", "Cassandra": "https://python.langchain.com/v0.2/docs/integrations/providers/cassandra/", "Momento": "https://python.langchain.com/v0.2/docs/integrations/providers/momento/", "To make the caching really obvious, lets use a slower model.": "https://python.langchain.com/v0.2/docs/modules/model_io/llms/llm_caching/", "": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/chat_model_caching/"}, "Fireworks": {"Fireworks": "https://python.langchain.com/v0.2/docs/integrations/providers/fireworks/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/llms/fireworks/", "Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/agent_types/self_ask_with_search/"}, "DropboxLoader": {"Dropbox": "https://python.langchain.com/v0.2/docs/integrations/providers/dropbox/", "Generate access token: https://www.dropbox.com/developers/apps/create.": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/dropbox/"}, "ForefrontAI": {"ForefrontAI": "https://python.langchain.com/v0.2/docs/integrations/providers/forefrontai/", "get a new token: https://docs.forefront.ai/forefront/api-reference/authentication": "https://python.langchain.com/v0.2/docs/integrations/llms/forefrontai/"}, "CometCallbackHandler": {"os.environ[\"OPENAI_ORGANIZATION\"] = \"...\"": "https://python.langchain.com/v0.2/docs/integrations/providers/comet_tracking/"}, "CTransformers": {"C Transformers": "https://python.langchain.com/v0.2/docs/integrations/providers/ctransformers/", "ctransformers.md": "https://python.langchain.com/v0.2/docs/integrations/llms/ctransformers/"}, "BiliBiliLoader": {"BiliBili": "https://python.langchain.com/v0.2/docs/integrations/providers/bilibili/", "bilibili.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/bilibili/"}, "TencentCOSDirectoryLoader": {"Tencent": "https://python.langchain.com/v0.2/docs/integrations/providers/tencent/", "tencent_cos_directory.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/tencent_cos_directory/"}, "TencentCOSFileLoader": {"Tencent": "https://python.langchain.com/v0.2/docs/integrations/providers/tencent/", "tencent_cos_file.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/tencent_cos_file/"}, "OBSDirectoryLoader": {"Huawei": 
"https://python.langchain.com/v0.2/docs/integrations/providers/huawei/", "Install the required package": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/huawei_obs_directory/"}, "OBSFileLoader": {"Huawei": "https://python.langchain.com/v0.2/docs/integrations/providers/huawei/", "Install the required package": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/huawei_obs_file/"}, "DiffbotLoader": {"Diffbot": "https://python.langchain.com/v0.2/docs/integrations/providers/diffbot/", "diffbot.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/diffbot/"}, "DeepSparse": {"DeepSparse": "https://python.langchain.com/v0.2/docs/integrations/providers/.ipynb_checkpoints/deepsparse-checkpoint/", "deepsparse.md": "https://python.langchain.com/v0.2/docs/integrations/llms/deepsparse/"}, "AimCallbackHandler": {"scenario 1 - LLM": "https://python.langchain.com/v0.2/docs/integrations/providers/aim_tracking/"}, "ModernTreasuryLoader": {"Modern Treasury": "https://python.langchain.com/v0.2/docs/integrations/providers/modern_treasury/", "Create a vectorstore retriever from the loader": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/modern_treasury/"}, "GitHubIssuesLoader": {"GitHub": "https://python.langchain.com/v0.2/docs/integrations/providers/github/", "If you haven't set your access token as an environment variable, pass it in here.": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/github/"}, "GithubFileLoader": {"GitHub": "https://python.langchain.com/v0.2/docs/integrations/providers/github/", "If you haven't set your access token as an environment variable, pass it in here.": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/github/"}, "Banana": {"Banana": "https://python.langchain.com/v0.2/docs/integrations/providers/bananadev/", "Install the package https://docs.banana.dev/banana-docs/core-concepts/sdks/python": "https://python.langchain.com/v0.2/docs/integrations/llms/banana/"}, "InfinispanVS": {"Infinispan VS": "https://python.langchain.com/v0.2/docs/integrations/providers/infinispanvs/", "Ensure that all we need is installed": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/infinispanvs/"}, "CerebriumAI": {"CerebriumAI": "https://python.langchain.com/v0.2/docs/integrations/providers/cerebriumai/", "Install the package": "https://python.langchain.com/v0.2/docs/integrations/llms/cerebriumai/"}, "GutenbergLoader": {"Gutenberg": "https://python.langchain.com/v0.2/docs/integrations/providers/gutenberg/", "gutenberg.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/gutenberg/"}, "WikipediaLoader": {"Wikipedia": "https://python.langchain.com/v0.2/docs/integrations/providers/wikipedia/", "wikipedia.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/wikipedia/", "diffbot.md": "https://python.langchain.com/v0.2/docs/integrations/graphs/diffbot/"}, "ConfluenceLoader": {"Confluence": "https://python.langchain.com/v0.2/docs/integrations/providers/confluence/", "confluence.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/confluence/"}, "Predibase": {"Predibase": "https://python.langchain.com/v0.2/docs/integrations/providers/predibase/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/llms/predibase/"}, "Beam": {"Beam": "https://python.langchain.com/v0.2/docs/integrations/providers/beam/", "Set the environment variables": 
"https://python.langchain.com/v0.2/docs/integrations/llms/beam/"}, "GrobidParser": {"Grobid": "https://python.langchain.com/v0.2/docs/integrations/providers/grobid/", "grobid.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/grobid/"}, "GenericLoader": {"Grobid": "https://python.langchain.com/v0.2/docs/integrations/providers/grobid/", "set a flag to switch between local and remote parsing": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/youtube_audio/", "grobid.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/grobid/", "Code for: class MyClass:": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/source_code/", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/v0.2/docs/use_cases/code_understanding/", "Custom Document Loader {#custom-document-loader}": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/custom/"}, "Typesense": {"Typesense": "https://python.langchain.com/v0.2/docs/integrations/providers/typesense/", "typesense.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/typesense/"}, "Hologres": {"Hologres": "https://python.langchain.com/v0.2/docs/integrations/providers/hologres/", "Alibaba Cloud": "https://python.langchain.com/v0.2/docs/integrations/providers/alibaba_cloud/", "hologres.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/hologres/"}, "AI21": {"AI21 Labs": "https://python.langchain.com/v0.2/docs/integrations/providers/ai21/"}, "ArangoGraph": {"ArangoDB": "https://python.langchain.com/v0.2/docs/integrations/providers/arangodb/", "Instantiate ArangoDB Database": "https://python.langchain.com/v0.2/docs/integrations/graphs/arangodb/"}, "ArangoGraphQAChain": {"ArangoDB": "https://python.langchain.com/v0.2/docs/integrations/providers/arangodb/", "Instantiate ArangoDB Database": "https://python.langchain.com/v0.2/docs/integrations/graphs/arangodb/"}, "ArcGISLoader": {"ArcGIS": "https://python.langchain.com/v0.2/docs/integrations/providers/arcgis/", "arcgis.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/arcgis/"}, "WandbCallbackHandler": {"os.environ[\"OPENAI_API_KEY\"] = \"\"": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracking/"}, "ObsidianLoader": {"Obsidian": "https://python.langchain.com/v0.2/docs/integrations/providers/obsidian/", "obsidian.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/obsidian/"}, "create_sql_agent": {"CnosDB": "https://python.langchain.com/v0.2/docs/integrations/providers/cnosdb/", "sql_database.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/sql_database/", "Using LangSmith is recommended but not required. Uncomment below lines to use.": "https://python.langchain.com/v0.2/docs/use_cases/sql/csv/", "Uncomment the below to use LangSmith. 
Not required.": "https://python.langchain.com/v0.2/docs/use_cases/sql/agents/"}, "SQLDatabaseToolkit": {"CnosDB": "https://python.langchain.com/v0.2/docs/integrations/providers/cnosdb/", "sql_database.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/sql_database/"}, "Nebula": {"Nebula": "https://python.langchain.com/v0.2/docs/integrations/providers/symblai_nebula/", "symblai_nebula.md": "https://python.langchain.com/v0.2/docs/integrations/llms/symblai_nebula/"}, "Writer": {"Writer": "https://python.langchain.com/v0.2/docs/integrations/providers/writer/", "If you get an error, probably, you need to set up the \"base_url\" parameter that can be taken from the error log.": "https://python.langchain.com/v0.2/docs/integrations/llms/writer/"}, "BaichuanLLM": {"Baichuan": "https://python.langchain.com/v0.2/docs/integrations/providers/baichuan/", "Load the model": "https://python.langchain.com/v0.2/docs/integrations/llms/baichuan/"}, "ApacheDoris": {"Apache Doris": "https://python.langchain.com/v0.2/docs/integrations/providers/apache_doris/", "load text splitter and split docs into snippets of text": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/apache_doris/"}, "ZepVectorStore": {"Zep": "https://python.langchain.com/v0.2/docs/integrations/providers/zep/", "Collection config is needed if we're creating a new Zep Collection": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/zep/"}, "BrowserlessLoader": {"Browserless": "https://python.langchain.com/v0.2/docs/integrations/providers/browserless/", "browserless.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/browserless/"}, "AZLyricsLoader": {"AZLyrics": "https://python.langchain.com/v0.2/docs/integrations/providers/azlyrics/", "azlyrics.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/azlyrics/"}, "ToMarkdownLoader": {"2Markdown": "https://python.langchain.com/v0.2/docs/integrations/providers/tomarkdown/", "You will need to get your own API key. See https://2markdown.com/login": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/tomarkdown/"}, "Mlflow": {"MLflow Deployments for LLMs": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow/"}, "MlflowEmbeddings": {"MLflow Deployments for LLMs": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow/"}, "ChatMlflow": {"MLflow Deployments for LLMs": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow/"}, "GitLoader": {"Git": "https://python.langchain.com/v0.2/docs/integrations/providers/git/", "e.g. 
loading only python files": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/git/"}, "MlflowAIGateway": {"MLflow AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_ai_gateway/"}, "MlflowAIGatewayEmbeddings": {"MLflow AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_ai_gateway/"}, "ChatMLflowAIGateway": {"MLflow AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_ai_gateway/"}, "Tigris": {"Tigris": "https://python.langchain.com/v0.2/docs/integrations/providers/tigris/", "tigris.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tigris/"}, "Meilisearch": {"Meilisearch": "https://python.langchain.com/v0.2/docs/integrations/providers/meilisearch/", "Use Meilisearch vector store to store texts & associated embeddings as vector": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/meilisearch/"}, "SQLDatabaseChain": {"!pip3 install rebuff openai -U": "https://python.langchain.com/v0.2/docs/integrations/providers/rebuff/"}, "SnowflakeLoader": {"Snowflake": "https://python.langchain.com/v0.2/docs/integrations/providers/snowflake/", "snowflake.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/snowflake/"}, "CubeSemanticLoader": {"Cube": "https://python.langchain.com/v0.2/docs/integrations/providers/cube/", "Read more about security context here: https://cube.dev/docs/security": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/cube_semantic/"}, "Clickhouse": {"ClickHouse": "https://python.langchain.com/v0.2/docs/integrations/providers/clickhouse/", "clickhouse.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/clickhouse/"}, "ClickhouseSettings": {"ClickHouse": "https://python.langchain.com/v0.2/docs/integrations/providers/clickhouse/", "clickhouse.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/clickhouse/"}, "ChatDatabricks": {"-> content='Hello! How can I assist you today?'": "https://python.langchain.com/v0.2/docs/integrations/providers/databricks/", "If running a Databricks notebook attached to an interactive cluster in \"single user\"": "https://python.langchain.com/v0.2/docs/integrations/llms/databricks/"}, "DatabricksEmbeddings": {"-> content='Hello! How can I assist you today?'": "https://python.langchain.com/v0.2/docs/integrations/providers/databricks/", "If running a Databricks notebook attached to an interactive cluster in \"single user\"": "https://python.langchain.com/v0.2/docs/integrations/llms/databricks/"}, "TelegramChatFileLoader": {"Telegram": "https://python.langchain.com/v0.2/docs/integrations/providers/telegram/", "telegram.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/telegram/"}, "TelegramChatApiLoader": {"Telegram": "https://python.langchain.com/v0.2/docs/integrations/providers/telegram/", "telegram.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/telegram/"}, "PredictionGuard": {"Prediction Guard": "https://python.langchain.com/v0.2/docs/integrations/providers/predictionguard/", "Optional, add your OpenAI API Key. 
This is optional, as Prediction Guard allows": "https://python.langchain.com/v0.2/docs/integrations/llms/predictionguard/"}, "Together": {"together.md": "https://python.langchain.com/v0.2/docs/integrations/llms/together/"}, "NotionDirectoryLoader": {"Notion DB": "https://python.langchain.com/v0.2/docs/integrations/providers/notion/", "notion.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/notion/"}, "NotionDBLoader": {"Notion DB": "https://python.langchain.com/v0.2/docs/integrations/providers/notion/", "notiondb.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/notiondb/"}, "MWDumpLoader": {"MediaWikiDump": "https://python.langchain.com/v0.2/docs/integrations/providers/mediawikidump/", "mediawiki-utilities supports XML schema 0.11 in unmerged branches": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/mediawikidump/"}, "BraveSearchLoader": {"Brave Search": "https://python.langchain.com/v0.2/docs/integrations/providers/brave_search/", "brave_search.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/brave_search/"}, "StarRocks": {"StarRocks": "https://python.langchain.com/v0.2/docs/integrations/providers/starrocks/", "load text splitter and split docs into snippets of text": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/starrocks/"}, "GooseAI": {"GooseAI": "https://python.langchain.com/v0.2/docs/integrations/providers/gooseai/", "gooseai.md": "https://python.langchain.com/v0.2/docs/integrations/llms/gooseai/"}, "DatadogLogsLoader": {"Datadog Logs": "https://python.langchain.com/v0.2/docs/integrations/providers/datadog_logs/", "datadog_logs.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/datadog_logs/"}, "ApifyDatasetLoader": {"Apify": "https://python.langchain.com/v0.2/docs/integrations/providers/apify/", "apify_dataset.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/apify_dataset/"}, "NLPCloud": {"NLPCloud": "https://python.langchain.com/v0.2/docs/integrations/providers/nlpcloud/", "get a token: https://docs.nlpcloud.com/#authentication": "https://python.langchain.com/v0.2/docs/integrations/llms/nlpcloud/"}, "SemaDB": {"SemaDB": "https://python.langchain.com/v0.2/docs/integrations/providers/semadb/", "Create collection if running for the first time. 
If the collection": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/semadb/"}, "GitbookLoader": {"GitBook": "https://python.langchain.com/v0.2/docs/integrations/providers/gitbook/", "show second document": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/gitbook/"}, "VoyageAIRerank": {"VoyageAI": "https://python.langchain.com/v0.2/docs/integrations/providers/voyageai/", "OR (depending on Python version)": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/voyageai-reranker/"}, "Rockset": {"Rockset": "https://python.langchain.com/v0.2/docs/integrations/providers/rockset/", "output length: 4": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/rockset/"}, "RocksetLoader": {"Rockset": "https://python.langchain.com/v0.2/docs/integrations/providers/rockset/", "Loading Documents {#loading-documents}": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/rockset/"}, "Minimax": {"Minimax": "https://python.langchain.com/v0.2/docs/integrations/providers/minimax/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/llms/minimax/"}, "UnstructuredAPIFileIOLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/"}, "UnstructuredAPIFileLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "# Install package": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/unstructured_file/"}, "UnstructuredCHMLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/"}, "UnstructuredCSVLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "csv.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/csv/"}, "UnstructuredEmailLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "email.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/email/"}, "UnstructuredEPubLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "epub.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/epub/"}, "UnstructuredFileIOLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "folder_id='1yucgL9WGgWZdM1TOuKkeghlPizuzMYb5'": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_drive/"}, "UnstructuredFileLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "# Install package": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/unstructured_file/"}, "UnstructuredHTMLLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "HTML": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/html/"}, "UnstructuredImageLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "image.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/image/"}, "UnstructuredMarkdownLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "load text splitter and split docs into snippets of text": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/starrocks/", "Markdown": 
"https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/markdown/"}, "UnstructuredODTLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "odt.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/odt/"}, "UnstructuredOrgModeLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "org_mode.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/org_mode/"}, "UnstructuredPDFLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "PDF": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/pdf/"}, "UnstructuredRSTLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "rst.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/rst/"}, "UnstructuredRTFLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/"}, "UnstructuredTSVLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "tsv.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/tsv/"}, "UnstructuredURLLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "url.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/url/"}, "UnstructuredXMLLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "xml.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/xml/"}, "SelfHostedPipeline": {"Runhouse": "https://python.langchain.com/v0.2/docs/integrations/providers/runhouse/", "For an on-demand A100 with GCP, Azure, or Lambda": "https://python.langchain.com/v0.2/docs/integrations/llms/runhouse/"}, "SelfHostedHuggingFaceLLM": {"Runhouse": "https://python.langchain.com/v0.2/docs/integrations/providers/runhouse/", "For an on-demand A100 with GCP, Azure, or Lambda": "https://python.langchain.com/v0.2/docs/integrations/llms/runhouse/"}, "MlflowCallbackHandler": {"SCENARIO 1 - LLM": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_tracking/"}, "AstraDBVectorStore": {"Astra DB": "https://python.langchain.com/v0.2/docs/integrations/providers/astradb/", "astradb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/astradb/"}, "AstraDBCache": {"Astra DB": "https://python.langchain.com/v0.2/docs/integrations/providers/astradb/", "To make the caching really obvious, lets use a slower model.": "https://python.langchain.com/v0.2/docs/integrations/llms/llm_caching/"}, "AstraDBSemanticCache": {"Astra DB": "https://python.langchain.com/v0.2/docs/integrations/providers/astradb/", "To make the caching really obvious, lets use a slower model.": "https://python.langchain.com/v0.2/docs/integrations/llms/llm_caching/"}, "AstraDBLoader": {"Astra DB": "https://python.langchain.com/v0.2/docs/integrations/providers/astradb/", "astradb.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/astradb/"}, "AstraDBStore": {"Astra DB": "https://python.langchain.com/v0.2/docs/integrations/providers/astradb/", "astradb.md": "https://python.langchain.com/v0.2/docs/integrations/stores/astradb/"}, "AstraDBByteStore": {"Astra DB": "https://python.langchain.com/v0.2/docs/integrations/providers/astradb/", "astradb.md": 
"https://python.langchain.com/v0.2/docs/integrations/stores/astradb/"}, "SpreedlyLoader": {"Spreedly": "https://python.langchain.com/v0.2/docs/integrations/providers/spreedly/", "Create a vectorstore retriever from the loader": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/spreedly/"}, "OpenLLM": {"OpenLLM": "https://python.langchain.com/v0.2/docs/integrations/providers/openllm/", "openllm.md": "https://python.langchain.com/v0.2/docs/integrations/llms/openllm/"}, "PubMedLoader": {"PubMed": "https://python.langchain.com/v0.2/docs/integrations/providers/pubmed/", "pubmed.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/pubmed/"}, "SearxSearchResults": {"SearxNG Search API": "https://python.langchain.com/v0.2/docs/integrations/providers/searx/"}, "ActionServerToolkit": {"Robocorp": "https://python.langchain.com/v0.2/docs/integrations/providers/robocorp/", "Install package": "https://python.langchain.com/v0.2/docs/integrations/toolkits/robocorp/"}, "SpacyTextSplitter": {"spaCy": "https://python.langchain.com/v0.2/docs/integrations/providers/spacy/", "atlas.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/atlas/", "This is a long document we can split up.": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_transformers/split_by_token/"}, "Modal": {"Modal": "https://python.langchain.com/v0.2/docs/integrations/providers/modal/", "Register an account with Modal and get a new token.": "https://python.langchain.com/v0.2/docs/integrations/llms/modal/"}, "OpenCityDataLoader": {"Geopandas": "https://python.langchain.com/v0.2/docs/integrations/providers/geopandas/", "Load Open City Data": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/geopandas/", "open_city_data.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/open_city_data/"}, "PGEmbedding": {"Postgres Embedding": "https://python.langchain.com/v0.2/docs/integrations/providers/pg_embedding/", "Pip install necessary package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pgembedding/"}, "SQLiteVSS": {"SQLite": "https://python.langchain.com/v0.2/docs/integrations/providers/sqlite/", "You need to install sqlite-vss as a dependency.": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sqlitevss/"}, "Xinference": {"Xorbits Inference (Xinference)": "https://python.langchain.com/v0.2/docs/integrations/providers/xinference/", "xinference.md": "https://python.langchain.com/v0.2/docs/integrations/llms/xinference/"}, "IFixitLoader": {"iFixit": "https://python.langchain.com/v0.2/docs/integrations/providers/ifixit/", "ifixit.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/ifixit/"}, "AlephAlpha": {"Aleph Alpha": "https://python.langchain.com/v0.2/docs/integrations/providers/aleph_alpha/", "Install the package": "https://python.langchain.com/v0.2/docs/integrations/llms/aleph_alpha/"}, "PipelineAI": {"PipelineAI": "https://python.langchain.com/v0.2/docs/integrations/providers/pipelineai/", "Install the package": "https://python.langchain.com/v0.2/docs/integrations/llms/pipelineai/"}, "FacebookChatLoader": {"Facebook - Meta": "https://python.langchain.com/v0.2/docs/integrations/providers/facebook/", "pip install pandas": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/facebook_chat/"}, "Epsilla": {"Epsilla": "https://python.langchain.com/v0.2/docs/integrations/providers/epsilla/", "epsilla.md": 
"https://python.langchain.com/v0.2/docs/integrations/vectorstores/epsilla/"}, "AwaDB": {"AwaDB": "https://python.langchain.com/v0.2/docs/integrations/providers/awadb/", "awadb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/awadb/"}, "ArxivLoader": {"Arxiv": "https://python.langchain.com/v0.2/docs/integrations/providers/arxiv/", "arxiv.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/arxiv/"}, "BlockchainDocumentLoader": {"Alchemy": "https://python.langchain.com/v0.2/docs/integrations/providers/alchemy/", "get ALCHEMY_API_KEY from https://www.alchemy.com/": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/blockchain/"}, "BlockchainType": {"Alchemy": "https://python.langchain.com/v0.2/docs/integrations/providers/alchemy/", "get ALCHEMY_API_KEY from https://www.alchemy.com/": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/blockchain/"}, "Anyscale": {"Anyscale": "https://python.langchain.com/v0.2/docs/integrations/providers/anyscale/", "anyscale.md": "https://python.langchain.com/v0.2/docs/integrations/llms/anyscale/"}, "AINetworkToolkit": {"AINetwork": "https://python.langchain.com/v0.2/docs/integrations/providers/ainetwork/", "IMPORTANT: If you plan to use this account in the future, make sure to save the": "https://python.langchain.com/v0.2/docs/integrations/toolkits/ainetwork/"}, "StripeLoader": {"Stripe": "https://python.langchain.com/v0.2/docs/integrations/providers/stripe/", "Create a vectorstore retriever from the loader": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/stripe/"}, "StochasticAI": {"StochasticAI": "https://python.langchain.com/v0.2/docs/integrations/providers/stochasticai/", "stochasticai.md": "https://python.langchain.com/v0.2/docs/integrations/llms/stochasticai/"}, "Bagel": {"BagelDB": "https://python.langchain.com/v0.2/docs/integrations/providers/bageldb/", "create cluster and add texts": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/bageldb/"}, "TigerGraph": {"TigerGraph": "https://python.langchain.com/v0.2/docs/integrations/providers/tigergraph/"}, "BlackboardLoader": {"Blackboard": "https://python.langchain.com/v0.2/docs/integrations/providers/blackboard/", "blackboard.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/blackboard/"}, "YandexGPT": {"Yandex": "https://python.langchain.com/v0.2/docs/integrations/providers/yandex/", "yandex.md": "https://python.langchain.com/v0.2/docs/integrations/llms/yandex/"}, "LanceDB": {"LanceDB": "https://python.langchain.com/v0.2/docs/integrations/providers/lancedb/", "lancedb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/lancedb/", "Vector stores": "https://python.langchain.com/v0.2/docs/modules/data_connection/vectorstores/index/"}, "UpstashRedisCache": {"Upstash Redis": "https://python.langchain.com/v0.2/docs/integrations/providers/upstash/", "To make the caching really obvious, lets use a slower model.": "https://python.langchain.com/v0.2/docs/integrations/llms/llm_caching/"}, "NucliaTextTransformer": {"Nuclia": "https://python.langchain.com/v0.2/docs/integrations/providers/nuclia/", "nuclia_transformer.md": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/nuclia_transformer/"}, "AnalyticDB": {"AnalyticDB": "https://python.langchain.com/v0.2/docs/integrations/providers/analyticdb/", "Alibaba Cloud": "https://python.langchain.com/v0.2/docs/integrations/providers/alibaba_cloud/", "analyticdb.md": 
"https://python.langchain.com/v0.2/docs/integrations/vectorstores/analyticdb/"}, "GoogleApiYoutubeLoader": {"YouTube": "https://python.langchain.com/v0.2/docs/integrations/providers/youtube/", "Init the GoogleApiClient": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/youtube_transcript/"}, "PromptLayerOpenAI": {"PromptLayer": "https://python.langchain.com/v0.2/docs/integrations/providers/promptlayer/", "promptlayer_openai.md": "https://python.langchain.com/v0.2/docs/integrations/llms/promptlayer_openai/"}, "USearch": {"USearch": "https://python.langchain.com/v0.2/docs/integrations/providers/usearch/", "usearch.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/usearch/"}, "EtherscanLoader": {"Etherscan": "https://python.langchain.com/v0.2/docs/integrations/providers/etherscan/", "etherscan.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/etherscan/"}, "Arcee": {"Arcee": "https://python.langchain.com/v0.2/docs/integrations/providers/arcee/", "Create an instance of the Arcee class": "https://python.langchain.com/v0.2/docs/integrations/llms/arcee/"}, "WhyLabsCallbackHandler": {"you don't need to call close to write profiles to WhyLabs, upload will occur periodically, but to demo let's not wait.": "https://python.langchain.com/v0.2/docs/integrations/providers/whylabs_profiling/"}, "IuguLoader": {"Iugu": "https://python.langchain.com/v0.2/docs/integrations/providers/iugu/", "Create a vectorstore retriever from the loader": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/iugu/"}, "CouchbaseLoader": {"Couchbase": "https://python.langchain.com/v0.2/docs/integrations/providers/couchbase/", "query is a valid SQL++ query": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/couchbase/"}, "FlyteCallbackHandler": {"Flyte": "https://python.langchain.com/v0.2/docs/integrations/providers/flyte/"}, "wandb_tracing_enabled": {"wandb documentation to configure wandb using env variables": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracing/"}, "ManifestWrapper": {"Hazy Research": "https://python.langchain.com/v0.2/docs/integrations/providers/hazy_research/", "Map reduce example": "https://python.langchain.com/v0.2/docs/integrations/llms/manifest/"}, "OntotextGraphDBGraph": {"Ontotext GraphDB": "https://python.langchain.com/v0.2/docs/integrations/providers/ontotext_graphdb/", "feeding the schema using a user construct query": "https://python.langchain.com/v0.2/docs/integrations/graphs/ontotext/"}, "OntotextGraphDBQAChain": {"Ontotext GraphDB": "https://python.langchain.com/v0.2/docs/integrations/providers/ontotext_graphdb/", "feeding the schema using a user construct query": "https://python.langchain.com/v0.2/docs/integrations/graphs/ontotext/"}, "Marqo": {"Marqo": "https://python.langchain.com/v0.2/docs/integrations/providers/marqo/", "initialize marqo": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/marqo/"}, "IMSDbLoader": {"IMSDb": "https://python.langchain.com/v0.2/docs/integrations/providers/imsdb/", "imsdb.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/imsdb/"}, "TiDBLoader": {"TiDB": "https://python.langchain.com/v0.2/docs/integrations/providers/tidb/", "copy from tidb cloud console\uff0creplace it with your own": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/tidb/"}, "TiDBVectorStore": {"TiDB": "https://python.langchain.com/v0.2/docs/integrations/providers/tidb/", "Here we useimport getpass": 
"https://python.langchain.com/v0.2/docs/integrations/vectorstores/tidb_vector/"}, "DeepInfra": {"DeepInfra": "https://python.langchain.com/v0.2/docs/integrations/providers/deepinfra/", "get a new token: https://deepinfra.com/login?from=%2Fdash": "https://python.langchain.com/v0.2/docs/integrations/llms/deepinfra/"}, "RedditPostsLoader": {"Reddit": "https://python.langchain.com/v0.2/docs/integrations/providers/reddit/", "load using 'subreddit' mode": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/reddit/"}, "TrelloLoader": {"Trello": "https://python.langchain.com/v0.2/docs/integrations/providers/trello/", "If you have already set the API key and token using environment variables,": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/trello/"}, "AtlasDB": {"Atlas": "https://python.langchain.com/v0.2/docs/integrations/providers/atlas/", "atlas.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/atlas/"}, "SKLearnVectorStore": {"scikit-learn": "https://python.langchain.com/v0.2/docs/integrations/providers/sklearn/", "# if you plan to use bson serialization, install also:": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sklearn/"}, "EverNoteLoader": {"EverNote": "https://python.langchain.com/v0.2/docs/integrations/providers/evernote/", "lxml and html2text are required to parse EverNote notes": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/evernote/"}, "VDMS": {"VDMS": "https://python.langchain.com/v0.2/docs/integrations/providers/vdms/", "Pip install necessary package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vdms/"}, "VDMS_Client": {"VDMS": "https://python.langchain.com/v0.2/docs/integrations/providers/vdms/", "Pip install necessary package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vdms/"}, "TwitterTweetLoader": {"Twitter": "https://python.langchain.com/v0.2/docs/integrations/providers/twitter/", "Or load from access token and consumer keys": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/twitter/"}, "DiscordChatLoader": {"Discord": "https://python.langchain.com/v0.2/docs/integrations/providers/discord/", "discord.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/discord/"}, "AssemblyAIAudioTranscriptLoader": {"AssemblyAI": "https://python.langchain.com/v0.2/docs/integrations/providers/assemblyai/", "or a local file path: audio_file = \"./nbc.mp3\"": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/assemblyai/"}, "RedisCache": {"Redis": "https://python.langchain.com/v0.2/docs/integrations/providers/redis/", "To make the caching really obvious, lets use a slower model.": "https://python.langchain.com/v0.2/docs/integrations/llms/llm_caching/"}, "RedisSemanticCache": {"Redis": "https://python.langchain.com/v0.2/docs/integrations/providers/redis/", "To make the caching really obvious, lets use a slower model.": "https://python.langchain.com/v0.2/docs/integrations/llms/llm_caching/"}, "Kinetica": {"Kinetica": "https://python.langchain.com/v0.2/docs/integrations/providers/kinetica/", "Pip install necessary package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/kinetica/"}, "ClearMLCallbackHandler": {"Setup and use the ClearML Callback": "https://python.langchain.com/v0.2/docs/integrations/providers/clearml_tracking/"}, "create_cohere_react_agent": {"Cohere": "https://python.langchain.com/v0.2/docs/integrations/providers/cohere/"}, "SlackDirectoryLoader": 
{"Slack": "https://python.langchain.com/v0.2/docs/integrations/providers/slack/", "Optionally set your Slack URL. This will give you proper URLs in the docs sources.": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/slack/"}, "Ollama": {"Ollama": "https://python.langchain.com/v0.2/docs/integrations/providers/ollama/", "ollama.md": "https://python.langchain.com/v0.2/docs/integrations/llms/ollama/", "Quickstart": "https://python.langchain.com/v0.2/docs/modules/model_io/.ipynb_checkpoints/quick_start-checkpoint/", "Download a llamafile from HuggingFace": "https://python.langchain.com/v0.2/docs/guides/development/local_llms/", "Model I/O": "https://python.langchain.com/v0.2/docs/modules/model_io/index/"}, "HNLoader": {"Hacker News": "https://python.langchain.com/v0.2/docs/integrations/providers/hacker_news/", "# Automatically restart kernel after installs so that your environment can access the new packages": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_spanner/", "hacker_news.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/hacker_news/"}, "CTranslate2": {"CTranslate2": "https://python.langchain.com/v0.2/docs/integrations/providers/ctranslate2/", "conversation can take several minutes": "https://python.langchain.com/v0.2/docs/integrations/llms/ctranslate2/"}, "QianfanLLMEndpoint": {"Baidu": "https://python.langchain.com/v0.2/docs/integrations/providers/baidu/", "baidu_qianfan_endpoint.md": "https://python.langchain.com/v0.2/docs/integrations/llms/baidu_qianfan_endpoint/"}, "BESVectorStore": {"Baidu": "https://python.langchain.com/v0.2/docs/integrations/providers/baidu/", "Create a bes instance and index docs.": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/baiducloud_vector_search/"}, "Aphrodite": {"PygmalionAI": "https://python.langchain.com/v0.2/docs/integrations/providers/pygmalionai/", "%pip list | grep aphrodite": "https://python.langchain.com/v0.2/docs/integrations/llms/aphrodite/"}, "PaiEasEndpoint": {"Alibaba Cloud": "https://python.langchain.com/v0.2/docs/integrations/providers/alibaba_cloud/", "alibabacloud_pai_eas_endpoint.md": "https://python.langchain.com/v0.2/docs/integrations/llms/alibabacloud_pai_eas_endpoint/"}, "MaxComputeLoader": {"Alibaba Cloud": "https://python.langchain.com/v0.2/docs/integrations/providers/alibaba_cloud/", "alibaba_cloud_maxcompute.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/alibaba_cloud_maxcompute/"}, "AlibabaCloudOpenSearch": {"Alibaba Cloud": "https://python.langchain.com/v0.2/docs/integrations/providers/alibaba_cloud/", "for example": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/alibabacloud_opensearch/"}, "AlibabaCloudOpenSearchSettings": {"Alibaba Cloud": "https://python.langchain.com/v0.2/docs/integrations/providers/alibaba_cloud/", "for example": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/alibabacloud_opensearch/"}, "DocusaurusLoader": {"Docusaurus": "https://python.langchain.com/v0.2/docs/integrations/providers/docusaurus/", "fixes a bug with asyncio and jupyter": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/docusaurus/"}, "Annoy": {"Annoy": "https://python.langchain.com/v0.2/docs/integrations/providers/annoy/", "default metric is angular": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/annoy/"}, "BibtexLoader": {"BibTeX": "https://python.langchain.com/v0.2/docs/integrations/providers/bibtex/", "Create a dummy bibtex file and 
download a pdf.": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/bibtex/"}, "Cassandra": {"Cassandra": "https://python.langchain.com/v0.2/docs/integrations/providers/cassandra/", "cassandra.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/cassandra/"}, "CassandraCache": {"Cassandra": "https://python.langchain.com/v0.2/docs/integrations/providers/cassandra/", "To make the caching really obvious, lets use a slower model.": "https://python.langchain.com/v0.2/docs/integrations/llms/llm_caching/"}, "CassandraSemanticCache": {"Cassandra": "https://python.langchain.com/v0.2/docs/integrations/providers/cassandra/", "To make the caching really obvious, lets use a slower model.": "https://python.langchain.com/v0.2/docs/integrations/llms/llm_caching/"}, "CassandraLoader": {"Cassandra": "https://python.langchain.com/v0.2/docs/integrations/providers/cassandra/", "cassandra.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/cassandra/"}, "Vearch": {"Vearch": "https://python.langchain.com/v0.2/docs/integrations/providers/vearch/", "OR": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vearch/"}, "JoplinLoader": {"Joplin": "https://python.langchain.com/v0.2/docs/integrations/providers/joplin/", "joplin.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/joplin/"}, "ArthurCallbackHandler": {"arthur_tracking.md": "https://python.langchain.com/v0.2/docs/integrations/providers/arthur_tracking/"}, "AcreomLoader": {"Acreom": "https://python.langchain.com/v0.2/docs/integrations/providers/acreom/", "acreom.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/acreom/"}, "KDBAI": {"KDB.AI": "https://python.langchain.com/v0.2/docs/integrations/providers/kdbai/", "Clean up KDB.AI \"documents\" table and index for similarity search": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/kdbai/"}, "DuckDBLoader": {"DuckDB": "https://python.langchain.com/v0.2/docs/integrations/providers/duckdb/", "duckdb.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/duckdb/"}, "Petals": {"Petals": "https://python.langchain.com/v0.2/docs/integrations/providers/petals/", "this can take several minutes to download big files!": "https://python.langchain.com/v0.2/docs/integrations/llms/petals/"}, "MomentoCache": {"Momento": "https://python.langchain.com/v0.2/docs/integrations/providers/momento/", "To make the caching really obvious, lets use a slower model.": "https://python.langchain.com/v0.2/docs/integrations/llms/llm_caching/"}, "MomentoVectorIndex": {"Momento": "https://python.langchain.com/v0.2/docs/integrations/providers/momento/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/momento_vector_index/"}, "NIBittensorLLM": {"Bittensor": "https://python.langchain.com/v0.2/docs/integrations/providers/bittensor/", "System parameter in NIBittensorLLM is optional but you can set whatever you want to perform with model": "https://python.langchain.com/v0.2/docs/integrations/llms/bittensor/"}, "Neo4jVector": {"Neo4j": "https://python.langchain.com/v0.2/docs/integrations/providers/neo4j/", "Pip install necessary package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/neo4jvector/", "Uncomment the below to use LangSmith. 
Not required.": "https://python.langchain.com/v0.2/docs/use_cases/graph/prompting/"}, "Neo4jGraph": {"Neo4j": "https://python.langchain.com/v0.2/docs/integrations/providers/neo4j/", "diffbot.md": "https://python.langchain.com/v0.2/docs/integrations/graphs/diffbot/", "How many people played in Top Gun?": "https://python.langchain.com/v0.2/docs/integrations/graphs/neo4j_cypher/", "Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/graph/prompting/"}, "GraphCypherQAChain": {"Neo4j": "https://python.langchain.com/v0.2/docs/integrations/providers/neo4j/", "Creating and executing the seeding query": "https://python.langchain.com/v0.2/docs/integrations/graphs/memgraph/", "diffbot.md": "https://python.langchain.com/v0.2/docs/integrations/graphs/diffbot/", "How many people played in Top Gun?": "https://python.langchain.com/v0.2/docs/integrations/graphs/neo4j_cypher/", "Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/graph/prompting/"}, "DiffbotGraphTransformer": {"Neo4j": "https://python.langchain.com/v0.2/docs/integrations/providers/neo4j/", "diffbot.md": "https://python.langchain.com/v0.2/docs/integrations/graphs/diffbot/"}, "AirtableLoader": {"Airtable": "https://python.langchain.com/v0.2/docs/integrations/providers/airtable/", "airtable.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airtable/"}, "LarkSuiteDocLoader": {"ByteDance": "https://python.langchain.com/v0.2/docs/integrations/providers/byte_dance/", "see https://python.langchain.com/v0.2/docs/use_cases/summarization for more details": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/larksuite/"}, "JavelinAIGateway": {"Javelin AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/javelin_ai_gateway/", "Step 4: Embeddings Example {#step-4-embeddings-example}": "https://python.langchain.com/v0.2/docs/integrations/llms/javelin/"}, "JavelinAIGatewayEmbeddings": {"Javelin AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/javelin_ai_gateway/", "Step 4: Embeddings Example {#step-4-embeddings-example}": "https://python.langchain.com/v0.2/docs/integrations/llms/javelin/"}, "ChatJavelinAIGateway": {"Javelin AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/javelin_ai_gateway/", "Step 4: Embeddings Example {#step-4-embeddings-example}": "https://python.langchain.com/v0.2/docs/integrations/llms/javelin/"}, "TensorflowDatasetLoader": {"TensorFlow Datasets": "https://python.langchain.com/v0.2/docs/integrations/providers/tensorflow_datasets/", "Feature structure of `mlqa/en` dataset:": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/tensorflow_datasets/"}, "Clarifai": {"Clarifai": "https://python.langchain.com/v0.2/docs/integrations/providers/clarifai/", "Dependencies {#dependencies}": "https://python.langchain.com/v0.2/docs/integrations/llms/clarifai/"}, "DataheraldTextToSQL": {"Dataherald": "https://python.langchain.com/v0.2/docs/integrations/providers/.ipynb_checkpoints/dataherald-checkpoint/"}, "RoamLoader": {"Roam": "https://python.langchain.com/v0.2/docs/integrations/providers/roam/", "roam.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/roam/"}, "create_openai_tools_agent": {"Construct the OpenAI Tools agent": "https://python.langchain.com/v0.2/docs/integrations/providers/portkey/logging_tracing_portkey/", "Portkey": 
"https://python.langchain.com/v0.2/docs/integrations/providers/portkey/index/", "sql_database.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/sql_database/", "Set environmental variables here": "https://python.langchain.com/v0.2/docs/integrations/toolkits/slack/", "conversational_retrieval_agents.md": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/conversational_retrieval_agents/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/chatbots/tool_usage/", "Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/streaming/"}, "CONDENSE_QUESTION_PROMPT": {"Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_chat/"}, "load_qa_with_sources_chain": {"Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_chat/"}, "QA_PROMPT": {"Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_chat/"}, "Chroma": {"Chroma": "https://python.langchain.com/v0.2/docs/integrations/providers/.ipynb_checkpoints/chroma-checkpoint/", "You need the dgml-utils package to use the DocugamiLoader (run pip install directly without \"poetry run\" if you are not using poetry)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/docugami/", "Retrievers": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/.ipynb_checkpoints/index-checkpoint/"}, "RedisStore": {"redis.md": "https://python.langchain.com/v0.2/docs/integrations/stores/redis/"}, "InMemoryByteStore": {"in_memory.md": "https://python.langchain.com/v0.2/docs/integrations/stores/in_memory/", "Swapping the `ByteStore` {#swapping-the-bytestore}": "https://python.langchain.com/v0.2/docs/modules/data_connection/text_embedding/caching_embeddings/", "The vectorstore to use to index the child chunks": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/multi_vector/"}, "LocalFileStore": {"file_system.md": "https://python.langchain.com/v0.2/docs/integrations/stores/file_system/", "Swapping the `ByteStore` {#swapping-the-bytestore}": "https://python.langchain.com/v0.2/docs/modules/data_connection/text_embedding/caching_embeddings/"}, "CacheBackedEmbeddings": {"astradb.md": "https://python.langchain.com/v0.2/docs/integrations/stores/astradb/", "Swapping the `ByteStore` {#swapping-the-bytestore}": "https://python.langchain.com/v0.2/docs/modules/data_connection/text_embedding/caching_embeddings/"}, "UpstashRedisByteStore": {"upstash_redis.md": "https://python.langchain.com/v0.2/docs/integrations/stores/upstash_redis/"}, "ConneryToolkit": {"Specify your Connery Runner credentials.": "https://python.langchain.com/v0.2/docs/integrations/toolkits/connery/"}, "create_csv_agent": {"Create a dataframe": "https://python.langchain.com/v0.2/docs/integrations/toolkits/csv/"}, "create_xorbits_agent": {"xorbits.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/xorbits/"}, "JiraToolkit": {"jira.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/jira/"}, "JiraAPIWrapper": {"jira.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/jira/"}, "create_spark_dataframe_agent": {"in apache-spark root directory. 
(tested here with \"spark-3.4.0-bin-hadoop3 and later\")": "https://python.langchain.com/v0.2/docs/integrations/toolkits/spark/"}, "PyPDFLoader": {"document_comparison_toolkit.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/document_comparison_toolkit/", "TODO : Set values as per your requirements": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_vertex_ai_vector_search/", "astradb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/astradb/", "cassandra.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/cassandra/", "Clean up KDB.AI \"documents\" table and index for similarity search": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/kdbai/", "initialize MongoDB python client": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/mongodb_atlas/", "merge_doc.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/merge_doc/", "google_cloud_storage_file.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_cloud_storage_file/", "PDF": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/pdf/"}, "set_debug": {"document_comparison_toolkit.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/document_comparison_toolkit/", "System parameter in NIBittensorLLM is optional but you can set whatever you want to perform with model": "https://python.langchain.com/v0.2/docs/integrations/llms/bittensor/", "textgen.md": "https://python.langchain.com/v0.2/docs/integrations/llms/textgen/", "install the opaqueprompts and langchain packages": "https://python.langchain.com/v0.2/docs/integrations/llms/opaqueprompts/", "Debugging": "https://python.langchain.com/v0.2/docs/guides/development/debugging/"}, "PythonREPLTool": {"Define the neural network": "https://python.langchain.com/v0.2/docs/integrations/toolkits/python/"}, "create_pbi_agent": {"fictional example": "https://python.langchain.com/v0.2/docs/integrations/toolkits/powerbi/"}, "AzureCognitiveServicesToolkit": {"For Windows/Linux": "https://python.langchain.com/v0.2/docs/integrations/toolkits/azure_cognitive_services/"}, "Requests": {"Select the LLM to use. Here, we use gpt-3.5-turbo-instruct": "https://python.langchain.com/v0.2/docs/integrations/toolkits/openapi_nla/"}, "NLAToolkit": {"Select the LLM to use. Here, we use gpt-3.5-turbo-instruct": "https://python.langchain.com/v0.2/docs/integrations/toolkits/openapi_nla/"}, "build_resource_service": {"os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/integrations/toolkits/gmail/"}, "get_gmail_credentials": {"os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/integrations/toolkits/gmail/"}, "SlackToolkit": {"Set environmental variables here": "https://python.langchain.com/v0.2/docs/integrations/toolkits/slack/"}, "SteamToolkit": {"steam.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/steam/"}, "SteamWebAPIWrapper": {"steam.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/steam/"}, "create_json_agent": {"json.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/json/"}, "JsonToolkit": {"json.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/json/"}, "JsonSpec": {"json.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/json/", "NOTE: In this example. 
We must set `allow_dangerous_request=True` to enable the OpenAPI Agent to automatically use the Request Tool.": "https://python.langchain.com/v0.2/docs/integrations/toolkits/openapi/"}, "AirbyteStripeLoader": {"airbyte_structured_qa.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/airbyte_structured_qa/", "airbyte_stripe.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_stripe/"}, "create_pandas_dataframe_agent": {"airbyte_structured_qa.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/airbyte_structured_qa/", "pandas.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/pandas/", "Using LangSmith is recommended but not required. Uncomment below lines to use.": "https://python.langchain.com/v0.2/docs/use_cases/sql/csv/"}, "GitHubToolkit": {"Set your environment variables using os.environ": "https://python.langchain.com/v0.2/docs/integrations/toolkits/github/"}, "GitHubAPIWrapper": {"Set your environment variables using os.environ": "https://python.langchain.com/v0.2/docs/integrations/toolkits/github/"}, "ConversationSummaryBufferMemory": {"Set your environment variables using os.environ": "https://python.langchain.com/v0.2/docs/integrations/toolkits/github/", "We can see here that there is a summary of the conversation and then some previous interactions": "https://python.langchain.com/v0.2/docs/modules/memory/types/summary_buffer/"}, "render_text_description_and_args": {"Set your environment variables using os.environ": "https://python.langchain.com/v0.2/docs/integrations/toolkits/github/", "Set environmental variables here": "https://python.langchain.com/v0.2/docs/integrations/toolkits/amadeus/"}, "ClickupToolkit": {"Copilot Sandbox": "https://python.langchain.com/v0.2/docs/integrations/toolkits/clickup/"}, "ClickupAPIWrapper": {"Copilot Sandbox": "https://python.langchain.com/v0.2/docs/integrations/toolkits/clickup/"}, "create_spark_sql_agent": {"Note, you can also connect to Spark via Spark connect. For example:": "https://python.langchain.com/v0.2/docs/integrations/toolkits/spark_sql/"}, "SparkSQLToolkit": {"Note, you can also connect to Spark via Spark connect. For example:": "https://python.langchain.com/v0.2/docs/integrations/toolkits/spark_sql/"}, "SparkSQL": {"Note, you can also connect to Spark via Spark connect. 
For example:": "https://python.langchain.com/v0.2/docs/integrations/toolkits/spark_sql/"}, "PlayWrightBrowserToolkit": {"If this is your first time using playwright, you'll have to install a browser executable.": "https://python.langchain.com/v0.2/docs/integrations/toolkits/playwright/"}, "create_async_playwright_browser": {"If this is your first time using playwright, you'll have to install a browser executable.": "https://python.langchain.com/v0.2/docs/integrations/toolkits/playwright/"}, "create_conversational_retrieval_agent": {"cogniswitch.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/cogniswitch/"}, "CogniswitchToolkit": {"cogniswitch.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/cogniswitch/"}, "NasaToolkit": {"nasa.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/nasa/"}, "NasaAPIWrapper": {"nasa.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/nasa/"}, "MultionToolkit": {"Authorize connection to your Browser extention": "https://python.langchain.com/v0.2/docs/integrations/toolkits/multion/"}, "AmadeusToolkit": {"Set environmental variables here": "https://python.langchain.com/v0.2/docs/integrations/toolkits/amadeus/"}, "AzureAiServicesToolkit": {"azure_ai_services.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/azure_ai_services/"}, "create_structured_chat_agent": {"azure_ai_services.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/azure_ai_services/", "Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/agent_types/structured_chat/"}, "reduce_openapi_spec": {"NOTE: In this example. We must set `allow_dangerous_request=True` to enable the OpenAPI Agent to automatically use the Request Tool.": "https://python.langchain.com/v0.2/docs/integrations/toolkits/openapi/"}, "RequestsWrapper": {"NOTE: In this example. We must set `allow_dangerous_request=True` to enable the OpenAPI Agent to automatically use the Request Tool.": "https://python.langchain.com/v0.2/docs/integrations/toolkits/openapi/"}, "create_openapi_agent": {"NOTE: In this example. We must set `allow_dangerous_request=True` to enable the OpenAPI Agent to automatically use the Request Tool.": "https://python.langchain.com/v0.2/docs/integrations/toolkits/openapi/"}, "OpenAPIToolkit": {"NOTE: In this example. We must set `allow_dangerous_request=True` to enable the OpenAPI Agent to automatically use the Request Tool.": "https://python.langchain.com/v0.2/docs/integrations/toolkits/openapi/"}, "GitLabToolkit": {"Set your environment variables using os.environ": "https://python.langchain.com/v0.2/docs/integrations/toolkits/gitlab/"}, "GitLabAPIWrapper": {"Set your environment variables using os.environ": "https://python.langchain.com/v0.2/docs/integrations/toolkits/gitlab/"}, "PolygonToolkit": {"os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/integrations/toolkits/polygon/"}, "ApacheDorisSettings": {"load text splitter and split docs into snippets of text": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/apache_doris/"}, "DistanceStrategy": {"Pip install necessary package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sap_hanavector/", "# Automatically restart kernel after installs so that your environment can access the new packages": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_bigquery_vector_search/", "Create collection if running for the first time. 
If the collection": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/semadb/"}, "KineticaSettings": {"Pip install necessary package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/kinetica/"}, "SentenceTransformerEmbeddings": {"You need to install sqlite-vss as a dependency.": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sqlitevss/", "docs[0].metadata[\"id\"] == \"id:testapp:testapp::32\"": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vespa/", "import": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/chroma/"}, "Vald": {"Refresh is required for server use": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vald/"}, "RetrievalQAWithSourcesChain": {"install package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/weaviate/", "Install all needed libraries": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/yellowbrick/", "cosine: distance metric": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/jaguar/", "Pip install necessary package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/neo4jvector/", "initialize marqo": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/marqo/", "Uncomment this to install psychicapi if you don't already have it installed": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/psychic/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/web_scraping/"}, "Yellowbrick": {"Install all needed libraries": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/yellowbrick/"}, "LLMRails": {"Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/llm_rails/"}, "HanaDB": {"Pip install necessary package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sap_hanavector/"}, "VectorSearchVectorStoreDatastore": {"TODO : Set values as per your requirements": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_vertex_ai_vector_search/"}, "VertexAI": {"TODO : Set values as per your requirements": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_vertex_ai_vector_search/", "google_vertex_ai_palm.md": "https://python.langchain.com/v0.2/docs/integrations/llms/google_vertex_ai_palm/"}, "NucliaDB": {"nucliadb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/nucliadb/"}, "Hippo": {"openai": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/hippo/"}, "RedisText": {"connection to redis standalone at localhost, db 0, no password": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/redis/"}, "RedisNum": {"connection to redis standalone at localhost, db 0, no password": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/redis/"}, "RedisTag": {"connection to redis standalone at localhost, db 0, no password": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/redis/"}, "RedisFilter": {"connection to redis standalone at localhost, db 0, no password": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/redis/"}, "VespaStore": {"docs[0].metadata[\"id\"] == \"id:testapp:testapp::32\"": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vespa/"}, "CosmosDBSimilarityType": {"Set up the OpenAI Environment Variables": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azure_cosmos_db/", "To make the caching 
really obvious, lets use a slower model.": "https://python.langchain.com/v0.2/docs/integrations/llms/llm_caching/"}, "CosmosDBVectorSearchType": {"Set up the OpenAI Environment Variables": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azure_cosmos_db/", "To make the caching really obvious, lets use a slower model.": "https://python.langchain.com/v0.2/docs/integrations/llms/llm_caching/"}, "NeuralDBVectorStore": {"From scratch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/thirdai_neuraldb/"}, "VikingDB": {"vikingdb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vikingdb/"}, "VikingDBConfig": {"vikingdb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vikingdb/"}, "InMemoryDocstore": {"default metric is angular": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/annoy/", "Define your embedding model": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/time_weighted_vectorstore/", "Backed by a Vector Store": "https://python.langchain.com/v0.2/docs/modules/memory/types/vectorstore_retriever_memory/"}, "CouchbaseVectorStore": {"Wait until the cluster is ready for use.": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/couchbase/"}, "VLite": {"Load the document and split it into chunks": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vlite/"}, "DuckDB": {"duckdb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/duckdb/"}, "StarRocksSettings": {"load text splitter and split docs into snippets of text": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/starrocks/"}, "PathwayVectorClient": {"take into account only sources modified later than unix timestamp": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pathway/"}, "DocArrayHnswSearch": {"Get an OpenAI token: https://platform.openai.com/account/api-keys": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/docarray_hnsw/"}, "TileDB": {"tiledb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tiledb/"}, "EcloudESVectorStore": {"ecloud_vector_search.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/ecloud_vector_search/"}, "SurrealDBStore": {"%pip install --upgrade --quiet surrealdb langchain langchain-community": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/surrealdb/"}, "ElasticVectorSearch": {"Metadata {#metadata}": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/elasticsearch/"}, "PGVecto_rs": {"Run tests with shell:": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pgvecto_rs/"}, "JSONLoader": {"Pip install necessary packages": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/timescalevector/", "JSON": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/json/"}, "CollectionConfig": {"Collection config is needed if we're creating a new Zep Collection": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/zep/"}, "BaiduVectorDB": {"baiduvectordb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/baiduvectordb/"}, "openai": {"openai-old.md": "https://python.langchain.com/v0.2/docs/integrations/adapters/openai-old/", "openai.md": "https://python.langchain.com/v0.2/docs/integrations/adapters/openai/"}, "AsyncChromiumLoader": {"Load HTML": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/beautiful_soup/", 
"async_chromium.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/async_chromium/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/web_scraping/"}, "BeautifulSoupTransformer": {"Load HTML": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/beautiful_soup/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/web_scraping/"}, "OpenVINOReranker": {"Helper function for printing docs": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/openvino_rerank/"}, "create_metadata_tagger": {"Must be an OpenAI model that supports functions": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/openai_metadata_tagger/"}, "DoctranPropertyExtractor": {"doctran_extract_properties.md": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/doctran_extract_properties/"}, "DoctranQATransformer": {"doctran_interrogate_document.md": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/doctran_interrogate_document/"}, "CrossEncoderReranker": {"OR (depending on Python version)": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/cross_encoder_reranker/"}, "HuggingFaceCrossEncoder": {"OR (depending on Python version)": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/cross_encoder_reranker/"}, "DoctranTextTranslator": {"doctran_translate_document.md": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/doctran_translate_document/"}, "XorbitsLoader": {"Use lazy load for larger table, which won't read the full table into memory": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/xorbits/"}, "OutlookMessageLoader": {"email.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/email/"}, "TranscriptFormat": {"or a local file path: audio_file = \"./nbc.mp3\"": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/assemblyai/"}, "AirbyteSalesforceLoader": {"airbyte_salesforce.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_salesforce/"}, "AirbyteCDKLoader": {"airbyte_cdk.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_cdk/"}, "Docx2txtLoader": {"microsoft_word.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/microsoft_word/"}, "RSpaceLoader": {"rspace.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/rspace/"}, "SeleniumURLLoader": {"url.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/url/"}, "PlaywrightURLLoader": {"url.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/url/"}, "AirbyteJSONLoader": {"airbyte_json.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_json/"}, "GeoDataFrameLoader": {"Load Open City Data": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/geopandas/"}, "AirbyteTypeformLoader": {"airbyte_typeform.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_typeform/"}, "MHTMLLoader": {"Create a new loader object for the MHTML file": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/mhtml/"}, "NewsURLLoader": {"news.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/news/"}, "ImageCaptionLoader": {"image_captions.md": 
"https://python.langchain.com/v0.2/docs/integrations/document_loaders/image_captions/"}, "LLMSherpaFileLoader": {"Install package": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/llmsherpa/"}, "NucliaLoader": {"nuclia.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/nuclia/"}, "TomlLoader": {"toml.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/toml/"}, "PsychicLoader": {"Uncomment this to install psychicapi if you don't already have it installed": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/psychic/"}, "FireCrawlLoader": {"firecrawl.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/firecrawl/", "HTML": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/html/"}, "FakeListLLM": {"see https://python.langchain.com/v0.2/docs/use_cases/summarization for more details": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/larksuite/", "Define callback handlers by subclassing BaseModerationCallbackHandler": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/amazon_comprehend_chain/"}, "MergedDataLoader": {"merge_doc.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/merge_doc/"}, "RecursiveUrlLoader": {"Parameters {#parameters}": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/recursive_url/"}, "AirbyteHubspotLoader": {"airbyte_hubspot.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_hubspot/"}, "AirbyteGongLoader": {"airbyte_gong.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_gong/"}, "ReadTheDocsLoader": {"readthedocs_documentation.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/readthedocs_documentation/"}, "PolarsDataFrameLoader": {"Use lazy load for larger table, which won't read the full table into memory": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/polars_dataframe/"}, "DataFrameLoader": {"Use lazy load for larger table, which won't read the full table into memory": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/pandas_dataframe/"}, "SurrealDBLoader": {"%pip install --upgrade --quiet surrealdb langchain langchain-community": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/surrealdb/"}, "GoogleApiClient": {"Init the GoogleApiClient": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/youtube_transcript/"}, "ConcurrentLoader": {"concurrent.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/concurrent/"}, "RSSFeedLoader": {"rss.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/rss/"}, "PebbloSafeLoader": {"pebblo.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/pebblo/"}, "VsdxLoader": {"vsdx.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/vsdx/"}, "NotebookLoader": {"jupyter_notebook.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/jupyter_notebook/"}, "OracleAutonomousDatabaseLoader": {"oracleadb_loader.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/oracleadb_loader/"}, "LanguageParser": {"Code for: class MyClass:": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/source_code/", "Set env var OPENAI_API_KEY or load from a .env file": 
"https://python.langchain.com/v0.2/docs/use_cases/code_understanding/"}, "Language": {"Code for: class MyClass:": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/source_code/", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/v0.2/docs/use_cases/code_understanding/", "Full list of supported languages": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_transformers/code_splitter/"}, "SRTLoader": {"subtitle.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/subtitle/"}, "MastodonTootsLoader": {"Or set up access information to use a Mastodon app.": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/mastodon/"}, "AirbyteShopifyLoader": {"airbyte_shopify.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_shopify/"}, "GlueCatalogLoader": {"glue_catalog.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/glue_catalog/"}, "PySparkDataFrameLoader": {"pyspark_dataframe.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/pyspark_dataframe/"}, "AirbyteZendeskSupportLoader": {"airbyte_zendesk_support.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_zendesk_support/"}, "CoNLLULoader": {"conll-u.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/conll-u/"}, "MongodbLoader": {"add this import for running in jupyter notebook": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/mongodb/"}, "SitemapLoader": {"fixes a bug with asyncio and jupyter": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/sitemap/"}, "YuqueLoader": {"yuque.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/yuque/"}, "QuipLoader": {"quip.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/quip/"}, "MemgraphGraph": {"Creating and executing the seeding query": "https://python.langchain.com/v0.2/docs/integrations/graphs/memgraph/"}, "GraphSparqlQAChain": {"rdflib_sparql.md": "https://python.langchain.com/v0.2/docs/integrations/graphs/rdflib_sparql/"}, "RdfGraph": {"rdflib_sparql.md": "https://python.langchain.com/v0.2/docs/integrations/graphs/rdflib_sparql/"}, "NebulaGraphQAChain": {"connect ngql jupyter extension to nebulagraph": "https://python.langchain.com/v0.2/docs/integrations/graphs/nebula_graph/"}, "NebulaGraph": {"connect ngql jupyter extension to nebulagraph": "https://python.langchain.com/v0.2/docs/integrations/graphs/nebula_graph/"}, "GremlinQAChain": {"The underlying python-gremlin has a problem when running in notebook": "https://python.langchain.com/v0.2/docs/integrations/graphs/azure_cosmosdb_gremlin/"}, "GremlinGraph": {"The underlying python-gremlin has a problem when running in notebook": "https://python.langchain.com/v0.2/docs/integrations/graphs/azure_cosmosdb_gremlin/"}, "GraphDocument": {"The underlying python-gremlin has a problem when running in notebook": "https://python.langchain.com/v0.2/docs/integrations/graphs/azure_cosmosdb_gremlin/"}, "Node": {"The underlying python-gremlin has a problem when running in notebook": "https://python.langchain.com/v0.2/docs/integrations/graphs/azure_cosmosdb_gremlin/"}, "Relationship": {"The underlying python-gremlin has a problem when running in notebook": "https://python.langchain.com/v0.2/docs/integrations/graphs/azure_cosmosdb_gremlin/"}, "GraphIndexCreator": {"networkx.md": 
"https://python.langchain.com/v0.2/docs/integrations/graphs/networkx/"}, "GraphQAChain": {"networkx.md": "https://python.langchain.com/v0.2/docs/integrations/graphs/networkx/"}, "NetworkxEntityGraph": {"networkx.md": "https://python.langchain.com/v0.2/docs/integrations/graphs/networkx/"}, "HugeGraphQAChain": {"graph.refresh_schema()": "https://python.langchain.com/v0.2/docs/integrations/graphs/hugegraph/"}, "HugeGraph": {"graph.refresh_schema()": "https://python.langchain.com/v0.2/docs/integrations/graphs/hugegraph/"}, "AGEGraph": {"How many people played in Top Gun?": "https://python.langchain.com/v0.2/docs/integrations/graphs/apache_age/"}, "NeptuneSparqlQAChain": {"Optionally change the schema": "https://python.langchain.com/v0.2/docs/integrations/graphs/amazon_neptune_sparql/"}, "NeptuneRdfGraph": {"Optionally change the schema": "https://python.langchain.com/v0.2/docs/integrations/graphs/amazon_neptune_sparql/"}, "NeptuneGraph": {"amazon_neptune_open_cypher.md": "https://python.langchain.com/v0.2/docs/integrations/graphs/amazon_neptune_open_cypher/"}, "NeptuneAnalyticsGraph": {"amazon_neptune_open_cypher.md": "https://python.langchain.com/v0.2/docs/integrations/graphs/amazon_neptune_open_cypher/"}, "NeptuneOpenCypherQAChain": {"amazon_neptune_open_cypher.md": "https://python.langchain.com/v0.2/docs/integrations/graphs/amazon_neptune_open_cypher/"}, "KuzuQAChain": {"graph.refresh_schema()": "https://python.langchain.com/v0.2/docs/integrations/graphs/kuzu_db/"}, "KuzuGraph": {"graph.refresh_schema()": "https://python.langchain.com/v0.2/docs/integrations/graphs/kuzu_db/"}, "FalkorDBQAChain": {"falkordb.md": "https://python.langchain.com/v0.2/docs/integrations/graphs/falkordb/"}, "FalkorDBGraph": {"falkordb.md": "https://python.langchain.com/v0.2/docs/integrations/graphs/falkordb/"}, "ConversationBufferWindowMemory": {"Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/llms/baseten/", "install the opaqueprompts and langchain packages": "https://python.langchain.com/v0.2/docs/integrations/llms/opaqueprompts/", "Conversation Buffer Window": "https://python.langchain.com/v0.2/docs/modules/memory/types/buffer_window/"}, "Solar": {"solar.md": "https://python.langchain.com/v0.2/docs/integrations/llms/solar/"}, "IpexLLM": {"Update Langchain": "https://python.langchain.com/v0.2/docs/integrations/llms/ipex_llm/"}, "SagemakerEndpoint": {"sagemaker.md": "https://python.langchain.com/v0.2/docs/integrations/llms/sagemaker/", "Define callback handlers by subclassing BaseModerationCallbackHandler": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/amazon_comprehend_chain/"}, "LLMContentHandler": {"sagemaker.md": "https://python.langchain.com/v0.2/docs/integrations/llms/sagemaker/", "Define callback handlers by subclassing BaseModerationCallbackHandler": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/amazon_comprehend_chain/"}, "OctoAIEndpoint": {"octoai.md": "https://python.langchain.com/v0.2/docs/integrations/llms/octoai/"}, "TextGen": {"textgen.md": "https://python.langchain.com/v0.2/docs/integrations/llms/textgen/"}, "MosaicML": {"sign up for an account: https://forms.mosaicml.com/demo?utm_source=langchain": "https://python.langchain.com/v0.2/docs/integrations/llms/mosaicml/"}, "VolcEngineMaasLLM": {"Install the package": "https://python.langchain.com/v0.2/docs/integrations/llms/volcengine_maas/"}, "KoboldApiLLM": {"koboldai.md": "https://python.langchain.com/v0.2/docs/integrations/llms/koboldai/"}, "Konko": {"konko.md": 
"https://python.langchain.com/v0.2/docs/integrations/llms/konko/"}, "AsyncCallbackHandler": {"Guardrails for Amazon Bedrock with trace": "https://python.langchain.com/v0.2/docs/integrations/llms/bedrock/", "To enable streaming, we pass in `streaming=True` to the ChatModel constructor": "https://python.langchain.com/v0.2/docs/modules/callbacks/async_callbacks/", "Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/streaming/"}, "set_verbose": {"install the opaqueprompts and langchain packages": "https://python.langchain.com/v0.2/docs/integrations/llms/opaqueprompts/", "Debugging": "https://python.langchain.com/v0.2/docs/guides/development/debugging/"}, "OpaquePrompts": {"install the opaqueprompts and langchain packages": "https://python.langchain.com/v0.2/docs/integrations/llms/opaqueprompts/"}, "TitanTakeoff": {"Note importing TitanTakeoffPro instead of TitanTakeoff will work as well both use same object under the hood": "https://python.langchain.com/v0.2/docs/integrations/llms/titan_takeoff/"}, "Friendli": {"friendli.md": "https://python.langchain.com/v0.2/docs/integrations/llms/friendli/"}, "Databricks": {"If running a Databricks notebook attached to an interactive cluster in \"single user\"": "https://python.langchain.com/v0.2/docs/integrations/llms/databricks/"}, "LMFormatEnforcer": {"lmformatenforcer_experimental.md": "https://python.langchain.com/v0.2/docs/integrations/llms/lmformatenforcer_experimental/"}, "VLLM": {"vllm.md": "https://python.langchain.com/v0.2/docs/integrations/llms/vllm/"}, "VLLMOpenAI": {"vllm.md": "https://python.langchain.com/v0.2/docs/integrations/llms/vllm/"}, "CustomOpenAIContentFormatter": {"azure_ml.md": "https://python.langchain.com/v0.2/docs/integrations/llms/azure_ml/"}, "ContentFormatterBase": {"azure_ml.md": "https://python.langchain.com/v0.2/docs/integrations/llms/azure_ml/"}, "DollyContentFormatter": {"azure_ml.md": "https://python.langchain.com/v0.2/docs/integrations/llms/azure_ml/"}, "load_llm": {"azure_ml.md": "https://python.langchain.com/v0.2/docs/integrations/llms/azure_ml/"}, "MapReduceChain": {"Map reduce example": "https://python.langchain.com/v0.2/docs/integrations/llms/manifest/"}, "ModelLaboratory": {"Map reduce example": "https://python.langchain.com/v0.2/docs/integrations/llms/manifest/"}, "RELLM": {"We'll choose a regex that matches to a structured json string that looks like:": "https://python.langchain.com/v0.2/docs/integrations/llms/rellm_experimental/"}, "Yuan2": {"default infer_api for a local deployed Yuan2.0 inference server": "https://python.langchain.com/v0.2/docs/integrations/llms/yuan2/"}, "InMemoryCache": {"To make the caching really obvious, lets use a slower model.": "https://python.langchain.com/v0.2/docs/modules/model_io/llms/llm_caching/", "": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/chat_model_caching/"}, "GPTCache": {"To make the caching really obvious, lets use a slower model.": "https://python.langchain.com/v0.2/docs/integrations/llms/llm_caching/"}, "SQLAlchemyCache": {"To make the caching really obvious, lets use a slower model.": "https://python.langchain.com/v0.2/docs/integrations/llms/llm_caching/"}, "AzureCosmosDBSemanticCache": {"To make the caching really obvious, lets use a slower model.": "https://python.langchain.com/v0.2/docs/integrations/llms/llm_caching/"}, "SparkLLM": {"Load the model": "https://python.langchain.com/v0.2/docs/integrations/llms/sparkllm/"}, "Moonshot": {"Generate your api key from: 
https://platform.moonshot.cn/console/api-keys": "https://python.langchain.com/v0.2/docs/integrations/llms/moonshot/"}, "OpenLM": {"Uncomment to install openlm and openai if you haven't already": "https://python.langchain.com/v0.2/docs/integrations/llms/openlm/"}, "CloudflareWorkersAI": {"Using streaming": "https://python.langchain.com/v0.2/docs/integrations/llms/cloudflare_workersai/"}, "ChatGLM3": {"Install required dependencies": "https://python.langchain.com/v0.2/docs/integrations/llms/chatglm/"}, "ChatGLM": {"Install required dependencies": "https://python.langchain.com/v0.2/docs/integrations/llms/chatglm/"}, "Llamafile": {"llamafile.md": "https://python.langchain.com/v0.2/docs/integrations/llms/llamafile/", "Download a llamafile from HuggingFace": "https://python.langchain.com/v0.2/docs/guides/development/local_llms/", "Make sure the model path is correct for your system!": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/local_retrieval_qa/"}, "LayerupSecurity": {"Layerup Security": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/layerup_security/"}, "JsonFormer": {"jsonformer_experimental.md": "https://python.langchain.com/v0.2/docs/integrations/llms/jsonformer_experimental/"}, "WeightOnlyQuantPipeline": {"weight_only_quantization.md": "https://python.langchain.com/v0.2/docs/integrations/llms/weight_only_quantization/"}, "Replicate": {"magics to auto-reload external modules in case you are making changes to langchain while working on this notebook": "https://python.langchain.com/v0.2/docs/integrations/llms/replicate/"}, "create_history_aware_retriever": {"Quickstart": "https://python.langchain.com/v0.2/docs/get_started/quickstart/", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/v0.2/docs/use_cases/code_understanding/", "import dotenv": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/chat_history/"}, "BaseOutputParser": {"Quickstart": "https://python.langchain.com/v0.2/docs/get_started/.ipynb_checkpoints/quickstart-checkpoint/", "The [bool] desribes a parameterization of a generic.": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/custom/"}, "ConditionalPromptSelector": {"Download a llamafile from HuggingFace": "https://python.langchain.com/v0.2/docs/guides/development/local_llms/"}, "DatetimeOutputParser": {"Note that we set max_retries = 0 to avoid retrying on RateLimits, etc": "https://python.langchain.com/v0.2/docs/guides/productionization/fallbacks/", "datetime.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/datetime/"}, "HuggingFaceInjectionIdentifier": {"Using https://huggingface.co/laiyer/deberta-v3-base-prompt-injection": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/hugging_face_prompt_injection/"}, "load_chain": {"Using https://huggingface.co/laiyer/deberta-v3-base-prompt-injection": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/hugging_face_prompt_injection/"}, "FallacyChain": {"Logical Fallacy chain": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/logical_fallacy_chain/"}, "ModerationPiiError": {"Define callback handlers by subclassing BaseModerationCallbackHandler": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/amazon_comprehend_chain/"}, "BaseModerationConfig": {"Define callback handlers by subclassing BaseModerationCallbackHandler": 
"https://python.langchain.com/v0.2/docs/guides/productionization/safety/amazon_comprehend_chain/"}, "ModerationPiiConfig": {"Define callback handlers by subclassing BaseModerationCallbackHandler": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/amazon_comprehend_chain/"}, "ModerationPromptSafetyConfig": {"Define callback handlers by subclassing BaseModerationCallbackHandler": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/amazon_comprehend_chain/"}, "ModerationToxicityConfig": {"Define callback handlers by subclassing BaseModerationCallbackHandler": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/amazon_comprehend_chain/"}, "BaseModerationCallbackHandler": {"Define callback handlers by subclassing BaseModerationCallbackHandler": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/amazon_comprehend_chain/"}, "ConstitutionalChain": {"Constitutional chain": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/constitutional_chain/"}, "ConstitutionalPrinciple": {"Constitutional chain": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/constitutional_chain/"}, "format_document": {"QA with private data protection {#qa-with-private-data-protection}": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/qa_privacy_protection/"}, "runnable": {"Multi-language data anonymization with Microsoft Presidio {#multi-language-data-anonymization-with-microsoft-presidio}": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/multi_language/"}, "case_insensitive_matching_strategy": {"Reversible data anonymization with Microsoft Presidio {#reversible-data-anonymization-with-microsoft-presidio}": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/reversible/"}, "fuzzy_matching_strategy": {"Reversible data anonymization with Microsoft Presidio {#reversible-data-anonymization-with-microsoft-presidio}": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/reversible/"}, "combined_exact_fuzzy_matching_strategy": {"Reversible data anonymization with Microsoft Presidio {#reversible-data-anonymization-with-microsoft-presidio}": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/reversible/"}, "load_evaluator": {"Initialize the language model": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/examples/comparisons/", "ANTHROPIC_API_KEY=": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/trajectory/trajectory_eval/", "pairwise_embedding_distance.md": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/comparison/pairwise_embedding_distance/", "The prompt was assigned to the evaluator": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/comparison/pairwise_string/", "This is equivalent to loading using the enum": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/string/criteria_eval_chain/", "Check for the presence of a YYYY-MM-DD string.": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/string/regex_match/", "Correct": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/string/scoring_eval_chain/", "Alternatively": 
"https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/string/exact_match/", "The results purely character-based, so it's less useful when negation is concerned": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/string/string_distance/", "You can load by enum or by raw python string": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/string/embedding_distance/"}, "load_dataset": {"Initialize the language model": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/examples/comparisons/"}, "AgentTrajectoryEvaluator": {"custom.md": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/trajectory/custom/"}, "EmbeddingDistance": {"pairwise_embedding_distance.md": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/comparison/pairwise_embedding_distance/", "You can load by enum or by raw python string": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/string/embedding_distance/"}, "PairwiseStringEvaluator": {"%env ANTHROPIC_API_KEY=YOUR_API_KEY": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/comparison/custom/"}, "Criteria": {"This is equivalent to loading using the enum": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/string/criteria_eval_chain/"}, "JsonValidityEvaluator": {"Equivalently": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/string/json/"}, "JsonEqualityEvaluator": {"Equivalently": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/string/json/"}, "JsonEditDistanceEvaluator": {"Equivalently": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/string/json/"}, "JsonSchemaEvaluator": {"Equivalently": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/string/json/"}, "RegexMatchStringEvaluator": {"Check for the presence of a YYYY-MM-DD string.": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/string/regex_match/"}, "StringEvaluator": {"The perplexity is much higher since LangChain was introduced after 'gpt-2' was released and because it is never used in the following context.": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/string/custom/"}, "ExactMatchStringEvaluator": {"Alternatively": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/string/exact_match/"}, "StringDistance": {"The results purely character-based, so it's less useful when negation is concerned": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/string/string_distance/"}, "WebResearchRetriever": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/web_scraping/"}, "StuffDocumentsChain": {"Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/v0.2/docs/use_cases/summarization/", "Get embeddings.": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/long_context_reorder/"}, "MapReduceDocumentsChain": {"Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/v0.2/docs/use_cases/summarization/"}, "ReduceDocumentsChain": {"Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/v0.2/docs/use_cases/summarization/"}, "AnalyzeDocumentChain": {"Set env var OPENAI_API_KEY or load from a .env file": 
"https://python.langchain.com/v0.2/docs/use_cases/summarization/"}, "get_openapi_chain": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/apis/"}, "APIChain": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/apis/"}, "open_meteo_docs": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/apis/"}, "tmdb_docs": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/apis/"}, "podcast_docs": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/apis/"}, "LLMRequestsChain": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/apis/"}, "FewShotPromptTemplate": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/data_generation/", "Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/sql/prompting/", "Select the most similar example to the input.": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/few_shot_examples/", "Examples of a pretend task of creating antonyms.": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/example_selectors/similarity/", "index.md": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/example_selectors/index/", "Examples of a fictional translation task.": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/example_selectors/ngram_overlap/"}, "OPENAI_TEMPLATE": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/data_generation/"}, "create_openai_data_generator": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/data_generation/"}, "DatasetGenerator": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/data_generation/"}, "create_data_generation_chain": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/data_generation/"}, "create_extraction_chain_pydantic": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/data_generation/", "os.environ[\"OPENAI_API_KEY\"] = getpass.getpass()": "https://python.langchain.com/v0.2/docs/use_cases/sql/large_db/"}, "PydanticOutputParser": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/data_generation/", "Set up a parser": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/parse/", "Build a sample vectorDB": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/MultiQueryRetriever/", "Define your desired data structure.": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/pydantic/", "retry.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/retry/", "output_fixing.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/output_fixing/"}, "create_tool_calling_agent": {"os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/agents/", "Quickstart {#quickstart}": "https://python.langchain.com/v0.2/docs/modules/agents/quick_start/", "Construct the Tools 
agent": "https://python.langchain.com/v0.2/docs/modules/agents/agent_types/tool_calling/", "!pip install -qU langchain-openai": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/token_usage_tracking/"}, "Runnable": {"os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/tool_error_handling/", "If you'd like to use LangSmith, uncomment the below:": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/human_in_the_loop/"}, "RunnableConfig": {"os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/tool_error_handling/", "Run custom functions {#run-custom-functions}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/functions/"}, "ToolCall": {"os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/tool_error_handling/"}, "JsonOutputParser": {"If you'd like to use LangSmith, uncomment the below:": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/prompting/", "Define your desired data structure.": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/json/", "Streaming With LangChain {#streaming-with-langchain}": "https://python.langchain.com/v0.2/docs/expression_language/streaming/"}, "ConfigurableField": {"This will only get documents for Ankush": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/per_user/", "initialize the bm25 retriever and faiss retriever": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/ensemble/", "batch_configurable_chain([\"ice cream\", \"spaghetti\", \"dumplings\"])": "https://python.langchain.com/v0.2/docs/expression_language/why/", "Configure chain internals at runtime {#configure-chain-internals-at-runtime}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/configure/"}, "RunnableBinding": {"This will only get documents for Ankush": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/per_user/"}, "RunnablePick": {"Make sure the model path is correct for your system!": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/local_retrieval_qa/"}, "ChatMessageHistory": {"import dotenv": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/chat_history/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/chatbots/memory_management/", "agent_with_memory.md": "https://python.langchain.com/v0.2/docs/modules/memory/agent_with_memory/", "Conversation Summary": "https://python.langchain.com/v0.2/docs/modules/memory/types/summary/", "Chat Messages": "https://python.langchain.com/v0.2/docs/modules/memory/chat_messages/index/", "Quickstart {#quickstart}": "https://python.langchain.com/v0.2/docs/modules/agents/quick_start/", "Remembers": "https://python.langchain.com/v0.2/docs/expression_language/how_to/message_history/"}, "BaseChatMessageHistory": {"import dotenv": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/chat_history/", "Remembers": "https://python.langchain.com/v0.2/docs/expression_language/how_to/message_history/"}, "LogStreamCallbackHandler": {"import dotenv": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/streaming/"}, "JsonOutputKeyToolsParser": {"Uncomment if you want to log to LangSmith": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/citations/", "Using LangSmith is recommended but not required. 
Uncomment below lines to use.": "https://python.langchain.com/v0.2/docs/use_cases/sql/csv/", "openai_tools.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/openai_tools/"}, "ChatAnthropicMessages": {"Uncomment if you want to log to LangSmith": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/citations/"}, "XMLOutputParser": {"Uncomment if you want to log to LangSmith": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/citations/", "xml.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/xml/"}, "EmbeddingsFilter": {"Uncomment if you want to log to LangSmith": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/citations/", "Helper function for printing docs": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/contextual_compression/"}, "PydanticToolsParser": {"%pip install -qU langchain langchain-community langchain-openai langchain-chroma": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/no_queries/", "%pip install -qU langchain langchain-openai": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/techniques/hyde/", "%pip install -qU langchain-core langchain-openai": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/techniques/step_back/", "Tool calling {#tool-calling}": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/function_calling/", "Function calling": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/.ipynb_checkpoints/function_calling-checkpoint/", "openai_tools.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/openai_tools/"}, "chain": {"%pip install -qU langchain langchain-community langchain-openai langchain-chroma": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/no_queries/", "Streaming With LangChain {#streaming-with-langchain}": "https://python.langchain.com/v0.2/docs/expression_language/streaming/", "decorator.md": "https://python.langchain.com/v0.2/docs/expression_language/how_to/decorator/"}, "Comparator": {"constructing-filters.md": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/constructing-filters/"}, "Comparison": {"constructing-filters.md": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/constructing-filters/"}, "Operation": {"constructing-filters.md": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/constructing-filters/"}, "Operator": {"constructing-filters.md": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/constructing-filters/"}, "StructuredQuery": {"constructing-filters.md": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/constructing-filters/"}, "ChromaTranslator": {"constructing-filters.md": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/constructing-filters/", "This example only specifies a filter": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/self_query/"}, "ElasticsearchTranslator": {"constructing-filters.md": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/constructing-filters/"}, "LLMGraphTransformer": {"Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/graph/constructing/"}, "CypherQueryCorrector": {"Uncomment the below to use LangSmith. 
Not required.": "https://python.langchain.com/v0.2/docs/use_cases/graph/mapping/"}, "Schema": {"Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/graph/mapping/"}, "AsyncCallbackManagerForToolRun": {"Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/graph/semantic/", "Import things that are needed generically": "https://python.langchain.com/v0.2/docs/modules/tools/custom_tools/"}, "CallbackManagerForToolRun": {"Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/graph/semantic/", "Import things that are needed generically": "https://python.langchain.com/v0.2/docs/modules/tools/custom_tools/"}, "BaseTool": {"Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/graph/semantic/", "Import things that are needed generically": "https://python.langchain.com/v0.2/docs/modules/tools/custom_tools/", "Function calling": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/.ipynb_checkpoints/function_calling-checkpoint/"}, "format_to_openai_function_messages": {"Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/graph/semantic/", "Load in document to retrieve over": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/agent_structured/", "prompt_size.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/prompt_size/"}, "OpenAIFunctionsAgentOutputParser": {"Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/graph/semantic/", "prompt_size.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/prompt_size/"}, "convert_to_openai_function": {"Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/graph/semantic/", "tools_as_openai_functions.md": "https://python.langchain.com/v0.2/docs/modules/tools/tools_as_openai_functions/"}, "SemanticSimilarityExampleSelector": {"Uncomment the below to use LangSmith. 
Not required.": "https://python.langchain.com/v0.2/docs/use_cases/sql/prompting/", "Select the most similar example to the input.": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/few_shot_examples/", "This is a prompt template used to format each individual example.": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/few_shot_examples_chat/", "Examples of a pretend task of creating antonyms.": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/example_selectors/similarity/"}, "RunnableBranch": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/chatbots/quickstart/", "Dynamically route logic based on input {#dynamically-route-logic-based-on-input}": "https://python.langchain.com/v0.2/docs/expression_language/how_to/routing/"}, "BSHTMLLoader": {"Download the content": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/handle_long_text/", "HTML": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/html/"}, "create_structured_output_runnable": {"Download the content": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/handle_long_text/"}, "BS4HTMLParser": {"Configure the parsers that you want to use per mime-type!": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/handle_files/"}, "PDFMinerParser": {"Configure the parsers that you want to use per mime-type!": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/handle_files/"}, "MimeTypeBasedParser": {"Configure the parsers that you want to use per mime-type!": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/handle_files/"}, "TextParser": {"Configure the parsers that you want to use per mime-type!": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/handle_files/"}, "PythonAstREPLTool": {"Using LangSmith is recommended but not required. Uncomment below lines to use.": "https://python.langchain.com/v0.2/docs/use_cases/sql/csv/"}, "create_sql_query_chain": {"Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/sql/prompting/", "os.environ[\"OPENAI_API_KEY\"] = getpass.getpass()": "https://python.langchain.com/v0.2/docs/use_cases/sql/large_db/"}, "QuerySQLDataBaseTool": {"Uncomment the below to use LangSmith. 
Not required.": "https://python.langchain.com/v0.2/docs/use_cases/sql/quickstart/"}, "SQLRecordManager": {"indexing.md": "https://python.langchain.com/v0.2/docs/modules/data_connection/indexing/"}, "index": {"indexing.md": "https://python.langchain.com/v0.2/docs/modules/data_connection/indexing/"}, "BaseLoader": {"indexing.md": "https://python.langchain.com/v0.2/docs/modules/data_connection/indexing/", "Custom Document Loader {#custom-document-loader}": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/custom/"}, "EnsembleRetriever": {"initialize the bm25 retriever and faiss retriever": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/ensemble/"}, "JsonKeyOutputFunctionsParser": {"The vectorstore to use to index the child chunks": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/multi_vector/", "openai_functions.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/openai_functions/", "prompt_llm_parser.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/prompt_llm_parser/"}, "LLMChainExtractor": {"Helper function for printing docs": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/contextual_compression/"}, "LLMChainFilter": {"Helper function for printing docs": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/contextual_compression/"}, "DocumentCompressorPipeline": {"Helper function for printing docs": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/contextual_compression/"}, "CallbackManagerForRetrieverRun": {"Custom Retriever {#custom-retriever}": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/custom_retriever/"}, "BaseRetriever": {"Custom Retriever {#custom-retriever}": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/custom_retriever/"}, "TimeWeightedVectorStoreRetriever": {"Define your embedding model": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/time_weighted_vectorstore/"}, "mock_now": {"Define your embedding model": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/time_weighted_vectorstore/"}, "ParentDocumentRetriever": {"This text splitter is used to create the child documents": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/parent_document_retriever/"}, "StructuredQueryOutputParser": {"This example only specifies a filter": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/self_query/"}, "get_query_constructor_prompt": {"This example only specifies a filter": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/self_query/"}, "Pinecone": {"Self-querying": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/.ipynb_checkpoints/self_query-checkpoint/"}, "RecursiveJsonSplitter": {"This is a large nested json object and will be loaded as a python dict": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_transformers/recursive_json_splitter/"}, "HTMLHeaderTextSplitter": {"for local file use html_splitter.split_text_from_file()": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_transformers/HTML_header_metadata/"}, "SemanticChunker": {"This is a long document we can split up.": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_transformers/semantic-chunker/"}, "SentenceTransformersTokenTextSplitter": 
{"This is a long document we can split up.": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_transformers/split_by_token/"}, "NLTKTextSplitter": {"This is a long document we can split up.": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_transformers/split_by_token/"}, "KonlpyTextSplitter": {"This is a long document we can split up.": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_transformers/split_by_token/"}, "MarkdownHeaderTextSplitter": {"MD splits": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_transformers/markdown_header_metadata/"}, "HTMLSectionSplitter": {"Split": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_transformers/HTML_section_aware_splitter/"}, "BaseBlobParser": {"Custom Document Loader {#custom-document-loader}": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/custom/"}, "FileSystemBlobLoader": {"Custom Document Loader {#custom-document-loader}": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/custom/"}, "MathpixPDFLoader": {"PDF": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/pdf/"}, "PyPDFium2Loader": {"PDF": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/pdf/"}, "PDFMinerLoader": {"PDF": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/pdf/"}, "PDFMinerPDFasHTMLLoader": {"PDF": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/pdf/"}, "PyMuPDFLoader": {"PDF": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/pdf/"}, "PyPDFDirectoryLoader": {"PDF": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/pdf/"}, "PDFPlumberLoader": {"PDF": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/pdf/"}, "PythonLoader": {"File Directory": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/file_directory/"}, "ToolException": {"Import things that are needed generically": "https://python.langchain.com/v0.2/docs/modules/tools/custom_tools/"}, "MoveFileTool": {"tools_as_openai_functions.md": "https://python.langchain.com/v0.2/docs/modules/tools/tools_as_openai_functions/"}, "BaseMemory": {"!python -m spacy download en_core_web_lg": "https://python.langchain.com/v0.2/docs/modules/memory/custom_memory/"}, "CombinedMemory": {"Combined": "https://python.langchain.com/v0.2/docs/modules/memory/multiple_memory/"}, "ConversationSummaryMemory": {"Combined": "https://python.langchain.com/v0.2/docs/modules/memory/multiple_memory/", "Conversation Summary": "https://python.langchain.com/v0.2/docs/modules/memory/types/summary/"}, "ConversationKGMemory": {"kg.md": "https://python.langchain.com/v0.2/docs/modules/memory/types/kg/"}, "ConversationTokenBufferMemory": {"We can see here that the buffer is updated": "https://python.langchain.com/v0.2/docs/modules/memory/types/token_buffer/"}, "ConversationEntityMemory": {"Entity": "https://python.langchain.com/v0.2/docs/modules/memory/types/entity_summary_memory/"}, "ENTITY_MEMORY_CONVERSATION_TEMPLATE": {"Entity": "https://python.langchain.com/v0.2/docs/modules/memory/types/entity_summary_memory/"}, "VectorStoreRetrieverMemory": {"Backed by a Vector Store": "https://python.langchain.com/v0.2/docs/modules/memory/types/vectorstore_retriever_memory/"}, "BaseCallbackHandler": {"To enable streaming, we pass in 
`streaming=True` to the ChatModel constructor": "https://python.langchain.com/v0.2/docs/modules/callbacks/async_callbacks/", "First, define custom callback handler implementations": "https://python.langchain.com/v0.2/docs/modules/callbacks/multiple_callbacks/"}, "FileCallbackHandler": {"this chain will both print to stdout (because verbose=True) and write to 'output.log'": "https://python.langchain.com/v0.2/docs/modules/callbacks/filecallbackhandler/"}, "LLMResult": {"To enable streaming, we pass in `streaming=True` to the ChatModel constructor": "https://python.langchain.com/v0.2/docs/modules/callbacks/async_callbacks/", "Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/streaming/"}, "create_xml_agent": {"Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/agent_types/xml_agent/"}, "XMLAgentOutputParser": {"Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/agent_types/xml_agent/"}, "create_self_ask_with_search_agent": {"Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/agent_types/self_ask_with_search/"}, "TavilyAnswer": {"Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/agent_types/self_ask_with_search/"}, "OpenAIAssistantRunnable": {"openai_assistants.md": "https://python.langchain.com/v0.2/docs/modules/agents/agent_types/openai_assistants/"}, "AgentActionMessageLog": {"Load in document to retrieve over": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/agent_structured/"}, "LLMMathChain": {"need to use GPT-4 here as GPT-3.5 does not understand, however hard you insist, that": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/agent_iter/"}, "ChatGenerationChunk": {"Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/streaming/", "custom_chat_model.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/custom_chat_model/"}, "GenerationChunk": {"Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/streaming/", "custom_llm.md": "https://python.langchain.com/v0.2/docs/modules/model_io/llms/custom_llm/"}, "CommaSeparatedListOutputParser": {"Quickstart": "https://python.langchain.com/v0.2/docs/modules/model_io/.ipynb_checkpoints/quick_start-checkpoint/", "Model I/O": "https://python.langchain.com/v0.2/docs/modules/model_io/index/", "csv.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/csv/"}, "get_bedrock_anthropic_callback": {"!pip install -qU langchain-openai": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/token_usage_tracking/"}, "AIMessageChunk": {"custom_chat_model.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/custom_chat_model/", "The [bool] desribes a parameterization of a generic.": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/custom/"}, "FunctionMessageChunk": {"custom_chat_model.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/custom_chat_model/"}, "HumanMessageChunk": {"custom_chat_model.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/custom_chat_model/"}, "SystemMessageChunk": {"custom_chat_model.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/custom_chat_model/"}, "ToolMessageChunk": {"custom_chat_model.md": 
"https://python.langchain.com/v0.2/docs/modules/model_io/chat/custom_chat_model/"}, "AsyncCallbackManagerForLLMRun": {"custom_chat_model.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/custom_chat_model/"}, "CallbackManagerForLLMRun": {"custom_chat_model.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/custom_chat_model/", "custom_llm.md": "https://python.langchain.com/v0.2/docs/modules/model_io/llms/custom_llm/"}, "SimpleChatModel": {"custom_chat_model.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/custom_chat_model/"}, "ChatGeneration": {"custom_chat_model.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/custom_chat_model/", "The [bool] desribes a parameterization of a generic.": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/custom/"}, "ChatResult": {"custom_chat_model.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/custom_chat_model/"}, "run_in_executor": {"custom_chat_model.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/custom_chat_model/"}, "AIMessagePromptTemplate": {"Prompts": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/.ipynb_checkpoints/prompts-checkpoint/"}, "JsonOutputToolsParser": {"Function calling": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/.ipynb_checkpoints/function_calling-checkpoint/", "openai_tools.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/openai_tools/"}, "RunnableGenerator": {"The [bool] desribes a parameterization of a generic.": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/custom/"}, "OutputParserException": {"The [bool] desribes a parameterization of a generic.": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/custom/"}, "BaseGenerationOutputParser": {"The [bool] desribes a parameterization of a generic.": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/custom/"}, "Generation": {"The [bool] desribes a parameterization of a generic.": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/custom/"}, "SimpleJsonOutputParser": {"Define your desired data structure.": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/quick_start/"}, "ResponseSchema": {"structured.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/structured/"}, "StructuredOutputParser": {"structured.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/structured/"}, "YamlOutputParser": {"Define your desired data structure.": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/yaml/"}, "OutputFixingParser": {"retry.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/retry/", "output_fixing.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/output_fixing/"}, "RetryOutputParser": {"retry.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/retry/"}, "EnumOutputParser": {"enum.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/enum/"}, "JsonOutputFunctionsParser": {"openai_functions.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/openai_functions/", "prompt_llm_parser.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/prompt_llm_parser/"}, "PandasDataFrameOutputParser": {"Solely for documentation purposes.": 
"https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/pandas_dataframe/"}, "PipelinePromptTemplate": {"composition.md": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/composition/"}, "ChatMessagePromptTemplate": {"Quick reference {#quick-reference}": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/quick_start/"}, "MaxMarginalRelevanceExampleSelector": {"Examples of a pretend task of creating antonyms.": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/example_selectors/mmr/"}, "LengthBasedExampleSelector": {"Examples of a pretend task of creating antonyms.": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/example_selectors/length_based/"}, "BaseExampleSelector": {"index.md": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/example_selectors/index/"}, "LLM": {"custom_llm.md": "https://python.langchain.com/v0.2/docs/modules/model_io/llms/custom_llm/"}, "ChatPromptValue": {"prompt_size.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/prompt_size/"}, "cosine_similarity": {"Dynamically route logic based on input {#dynamically-route-logic-based-on-input}": "https://python.langchain.com/v0.2/docs/expression_language/how_to/routing/"}, "ConfigurableFieldSpec": {"Remembers": "https://python.langchain.com/v0.2/docs/expression_language/how_to/message_history/"}, "HubRunnable": {"Configure chain internals at runtime {#configure-chain-internals-at-runtime}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/configure/"}} From e37caa9b9a461798ea066d7c03fffab7552574dc Mon Sep 17 00:00:00 2001 From: Erick Friis Date: Tue, 20 Aug 2024 16:07:12 -0700 Subject: [PATCH 28/80] core: fix fallback context overwriting (#25550) fixes #25337 --- libs/core/langchain_core/runnables/fallbacks.py | 1 + 1 file changed, 1 insertion(+) diff --git a/libs/core/langchain_core/runnables/fallbacks.py b/libs/core/langchain_core/runnables/fallbacks.py index b3249b47cc42e..e3a1b2b1cfb33 100644 --- a/libs/core/langchain_core/runnables/fallbacks.py +++ b/libs/core/langchain_core/runnables/fallbacks.py @@ -180,6 +180,7 @@ def invoke( output = context.run( runnable.invoke, input, + config, **kwargs, ) except self.exceptions_to_handle as e: From 60cf49a61853848602298aada84daf197feb1f33 Mon Sep 17 00:00:00 2001 From: Erick Friis Date: Tue, 20 Aug 2024 16:21:38 -0700 Subject: [PATCH 29/80] chroma: ban chromadb sdk versions 0.5.4 and 0.5.5 due to pydantic bug (#25586) also remove some unused dependencies (fastapi) and unused test/lint/dev dependencies (community, openai, textsplitters) chromadb 0.5.4 introduced usage of `model_fields` which is pydantic v2 specific. also released in 0.5.5 --- libs/partners/chroma/poetry.lock | 2870 ++++++----------- libs/partners/chroma/pyproject.toml | 48 +- .../integration_tests/test_vectorstores.py | 4 +- 3 files changed, 912 insertions(+), 2010 deletions(-) diff --git a/libs/partners/chroma/poetry.lock b/libs/partners/chroma/poetry.lock index 3041eb17417bb..ad0b0441bfa98 100644 --- a/libs/partners/chroma/poetry.lock +++ b/libs/partners/chroma/poetry.lock @@ -1,124 +1,14 @@ -# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. 
- -[[package]] -name = "aiohttp" -version = "3.9.5" -description = "Async http client/server framework (asyncio)" -optional = false -python-versions = ">=3.8" -files = [ - {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fcde4c397f673fdec23e6b05ebf8d4751314fa7c24f93334bf1f1364c1c69ac7"}, - {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d6b3f1fabe465e819aed2c421a6743d8debbde79b6a8600739300630a01bf2c"}, - {file = "aiohttp-3.9.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ae79c1bc12c34082d92bf9422764f799aee4746fd7a392db46b7fd357d4a17a"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d3ebb9e1316ec74277d19c5f482f98cc65a73ccd5430540d6d11682cd857430"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84dabd95154f43a2ea80deffec9cb44d2e301e38a0c9d331cc4aa0166fe28ae3"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8a02fbeca6f63cb1f0475c799679057fc9268b77075ab7cf3f1c600e81dd46b"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c26959ca7b75ff768e2776d8055bf9582a6267e24556bb7f7bd29e677932be72"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:714d4e5231fed4ba2762ed489b4aec07b2b9953cf4ee31e9871caac895a839c0"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7a6a8354f1b62e15d48e04350f13e726fa08b62c3d7b8401c0a1314f02e3558"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c413016880e03e69d166efb5a1a95d40f83d5a3a648d16486592c49ffb76d0db"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ff84aeb864e0fac81f676be9f4685f0527b660f1efdc40dcede3c251ef1e867f"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ad7f2919d7dac062f24d6f5fe95d401597fbb015a25771f85e692d043c9d7832"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:702e2c7c187c1a498a4e2b03155d52658fdd6fda882d3d7fbb891a5cf108bb10"}, - {file = "aiohttp-3.9.5-cp310-cp310-win32.whl", hash = "sha256:67c3119f5ddc7261d47163ed86d760ddf0e625cd6246b4ed852e82159617b5fb"}, - {file = "aiohttp-3.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:471f0ef53ccedec9995287f02caf0c068732f026455f07db3f01a46e49d76bbb"}, - {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e0ae53e33ee7476dd3d1132f932eeb39bf6125083820049d06edcdca4381f342"}, - {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c088c4d70d21f8ca5c0b8b5403fe84a7bc8e024161febdd4ef04575ef35d474d"}, - {file = "aiohttp-3.9.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:639d0042b7670222f33b0028de6b4e2fad6451462ce7df2af8aee37dcac55424"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f26383adb94da5e7fb388d441bf09c61e5e35f455a3217bfd790c6b6bc64b2ee"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:66331d00fb28dc90aa606d9a54304af76b335ae204d1836f65797d6fe27f1ca2"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ff550491f5492ab5ed3533e76b8567f4b37bd2995e780a1f46bca2024223233"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:f22eb3a6c1080d862befa0a89c380b4dafce29dc6cd56083f630073d102eb595"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a81b1143d42b66ffc40a441379387076243ef7b51019204fd3ec36b9f69e77d6"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f64fd07515dad67f24b6ea4a66ae2876c01031de91c93075b8093f07c0a2d93d"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:93e22add827447d2e26d67c9ac0161756007f152fdc5210277d00a85f6c92323"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:55b39c8684a46e56ef8c8d24faf02de4a2b2ac60d26cee93bc595651ff545de9"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4715a9b778f4293b9f8ae7a0a7cef9829f02ff8d6277a39d7f40565c737d3771"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:afc52b8d969eff14e069a710057d15ab9ac17cd4b6753042c407dcea0e40bf75"}, - {file = "aiohttp-3.9.5-cp311-cp311-win32.whl", hash = "sha256:b3df71da99c98534be076196791adca8819761f0bf6e08e07fd7da25127150d6"}, - {file = "aiohttp-3.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:88e311d98cc0bf45b62fc46c66753a83445f5ab20038bcc1b8a1cc05666f428a"}, - {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c7a4b7a6cf5b6eb11e109a9755fd4fda7d57395f8c575e166d363b9fc3ec4678"}, - {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0a158704edf0abcac8ac371fbb54044f3270bdbc93e254a82b6c82be1ef08f3c"}, - {file = "aiohttp-3.9.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d153f652a687a8e95ad367a86a61e8d53d528b0530ef382ec5aaf533140ed00f"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82a6a97d9771cb48ae16979c3a3a9a18b600a8505b1115cfe354dfb2054468b4"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60cdbd56f4cad9f69c35eaac0fbbdf1f77b0ff9456cebd4902f3dd1cf096464c"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8676e8fd73141ded15ea586de0b7cda1542960a7b9ad89b2b06428e97125d4fa"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da00da442a0e31f1c69d26d224e1efd3a1ca5bcbf210978a2ca7426dfcae9f58"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18f634d540dd099c262e9f887c8bbacc959847cfe5da7a0e2e1cf3f14dbf2daf"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:320e8618eda64e19d11bdb3bd04ccc0a816c17eaecb7e4945d01deee2a22f95f"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:2faa61a904b83142747fc6a6d7ad8fccff898c849123030f8e75d5d967fd4a81"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:8c64a6dc3fe5db7b1b4d2b5cb84c4f677768bdc340611eca673afb7cf416ef5a"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:393c7aba2b55559ef7ab791c94b44f7482a07bf7640d17b341b79081f5e5cd1a"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c671dc117c2c21a1ca10c116cfcd6e3e44da7fcde37bf83b2be485ab377b25da"}, - {file = "aiohttp-3.9.5-cp312-cp312-win32.whl", hash = "sha256:5a7ee16aab26e76add4afc45e8f8206c95d1d75540f1039b84a03c3b3800dd59"}, - {file = "aiohttp-3.9.5-cp312-cp312-win_amd64.whl", hash = 
"sha256:5ca51eadbd67045396bc92a4345d1790b7301c14d1848feaac1d6a6c9289e888"}, - {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:694d828b5c41255e54bc2dddb51a9f5150b4eefa9886e38b52605a05d96566e8"}, - {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0605cc2c0088fcaae79f01c913a38611ad09ba68ff482402d3410bf59039bfb8"}, - {file = "aiohttp-3.9.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4558e5012ee03d2638c681e156461d37b7a113fe13970d438d95d10173d25f78"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dbc053ac75ccc63dc3a3cc547b98c7258ec35a215a92bd9f983e0aac95d3d5b"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4109adee842b90671f1b689901b948f347325045c15f46b39797ae1bf17019de"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6ea1a5b409a85477fd8e5ee6ad8f0e40bf2844c270955e09360418cfd09abac"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3c2890ca8c59ee683fd09adf32321a40fe1cf164e3387799efb2acebf090c11"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3916c8692dbd9d55c523374a3b8213e628424d19116ac4308e434dbf6d95bbdd"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8d1964eb7617907c792ca00b341b5ec3e01ae8c280825deadbbd678447b127e1"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d5ab8e1f6bee051a4bf6195e38a5c13e5e161cb7bad83d8854524798bd9fcd6e"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:52c27110f3862a1afbcb2af4281fc9fdc40327fa286c4625dfee247c3ba90156"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:7f64cbd44443e80094309875d4f9c71d0401e966d191c3d469cde4642bc2e031"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8b4f72fbb66279624bfe83fd5eb6aea0022dad8eec62b71e7bf63ee1caadeafe"}, - {file = "aiohttp-3.9.5-cp38-cp38-win32.whl", hash = "sha256:6380c039ec52866c06d69b5c7aad5478b24ed11696f0e72f6b807cfb261453da"}, - {file = "aiohttp-3.9.5-cp38-cp38-win_amd64.whl", hash = "sha256:da22dab31d7180f8c3ac7c7635f3bcd53808f374f6aa333fe0b0b9e14b01f91a"}, - {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1732102949ff6087589408d76cd6dea656b93c896b011ecafff418c9661dc4ed"}, - {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c6021d296318cb6f9414b48e6a439a7f5d1f665464da507e8ff640848ee2a58a"}, - {file = "aiohttp-3.9.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:239f975589a944eeb1bad26b8b140a59a3a320067fb3cd10b75c3092405a1372"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b7b30258348082826d274504fbc7c849959f1989d86c29bc355107accec6cfb"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd2adf5c87ff6d8b277814a28a535b59e20bfea40a101db6b3bdca7e9926bc24"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9a3d838441bebcf5cf442700e3963f58b5c33f015341f9ea86dcd7d503c07e2"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e3a1ae66e3d0c17cf65c08968a5ee3180c5a95920ec2731f53343fac9bad106"}, - {file = 
"aiohttp-3.9.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c69e77370cce2d6df5d12b4e12bdcca60c47ba13d1cbbc8645dd005a20b738b"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf56238f4bbf49dab8c2dc2e6b1b68502b1e88d335bea59b3f5b9f4c001475"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d1469f228cd9ffddd396d9948b8c9cd8022b6d1bf1e40c6f25b0fb90b4f893ed"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:45731330e754f5811c314901cebdf19dd776a44b31927fa4b4dbecab9e457b0c"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3fcb4046d2904378e3aeea1df51f697b0467f2aac55d232c87ba162709478c46"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8cf142aa6c1a751fcb364158fd710b8a9be874b81889c2bd13aa8893197455e2"}, - {file = "aiohttp-3.9.5-cp39-cp39-win32.whl", hash = "sha256:7b179eea70833c8dee51ec42f3b4097bd6370892fa93f510f76762105568cf09"}, - {file = "aiohttp-3.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:38d80498e2e169bc61418ff36170e0aad0cd268da8b38a17c4cf29d254a8b3f1"}, - {file = "aiohttp-3.9.5.tar.gz", hash = "sha256:edea7d15772ceeb29db4aff55e482d4bcfb6ae160ce144f2682de02f6d693551"}, -] - -[package.dependencies] -aiosignal = ">=1.1.2" -async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} -attrs = ">=17.3.0" -frozenlist = ">=1.1.1" -multidict = ">=4.5,<7.0" -yarl = ">=1.0,<2.0" - -[package.extras] -speedups = ["Brotli", "aiodns", "brotlicffi"] - -[[package]] -name = "aiosignal" -version = "1.3.1" -description = "aiosignal: a list of registered asynchronous callbacks" -optional = false -python-versions = ">=3.7" -files = [ - {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, - {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, -] - -[package.dependencies] -frozenlist = ">=1.1.0" +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. 
[[package]] name = "annotated-types" -version = "0.6.0" +version = "0.7.0" description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" files = [ - {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, - {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, ] [package.dependencies] @@ -126,13 +16,13 @@ typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""} [[package]] name = "anyio" -version = "4.3.0" +version = "4.4.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.8" files = [ - {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"}, - {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"}, + {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"}, + {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"}, ] [package.dependencies] @@ -163,36 +53,6 @@ typing-extensions = {version = ">=4", markers = "python_version < \"3.11\""} [package.extras] tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] -[[package]] -name = "async-timeout" -version = "4.0.3" -description = "Timeout context manager for asyncio programs" -optional = false -python-versions = ">=3.7" -files = [ - {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, - {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, -] - -[[package]] -name = "attrs" -version = "23.2.0" -description = "Classes Without Boilerplate" -optional = false -python-versions = ">=3.7" -files = [ - {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, - {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, -] - -[package.extras] -cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[tests]", "pre-commit"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] -tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] -tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] - [[package]] name = "backoff" version = "2.2.1" @@ -206,38 +66,38 @@ files = [ [[package]] name = "bcrypt" -version = "4.1.3" +version = "4.2.0" description = "Modern password hashing for your software and your servers" optional = false python-versions = ">=3.7" files = [ - {file = "bcrypt-4.1.3-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:48429c83292b57bf4af6ab75809f8f4daf52aa5d480632e53707805cc1ce9b74"}, - {file = "bcrypt-4.1.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:4a8bea4c152b91fd8319fef4c6a790da5c07840421c2b785084989bf8bbb7455"}, - {file = "bcrypt-4.1.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d3b317050a9a711a5c7214bf04e28333cf528e0ed0ec9a4e55ba628d0f07c1a"}, - {file = "bcrypt-4.1.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:094fd31e08c2b102a14880ee5b3d09913ecf334cd604af27e1013c76831f7b05"}, - {file = "bcrypt-4.1.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:4fb253d65da30d9269e0a6f4b0de32bd657a0208a6f4e43d3e645774fb5457f3"}, - {file = "bcrypt-4.1.3-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:193bb49eeeb9c1e2db9ba65d09dc6384edd5608d9d672b4125e9320af9153a15"}, - {file = "bcrypt-4.1.3-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:8cbb119267068c2581ae38790e0d1fbae65d0725247a930fc9900c285d95725d"}, - {file = "bcrypt-4.1.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6cac78a8d42f9d120b3987f82252bdbeb7e6e900a5e1ba37f6be6fe4e3848286"}, - {file = "bcrypt-4.1.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:01746eb2c4299dd0ae1670234bf77704f581dd72cc180f444bfe74eb80495b64"}, - {file = "bcrypt-4.1.3-cp37-abi3-win32.whl", hash = "sha256:037c5bf7c196a63dcce75545c8874610c600809d5d82c305dd327cd4969995bf"}, - {file = "bcrypt-4.1.3-cp37-abi3-win_amd64.whl", hash = "sha256:8a893d192dfb7c8e883c4576813bf18bb9d59e2cfd88b68b725990f033f1b978"}, - {file = "bcrypt-4.1.3-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0d4cf6ef1525f79255ef048b3489602868c47aea61f375377f0d00514fe4a78c"}, - {file = "bcrypt-4.1.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5698ce5292a4e4b9e5861f7e53b1d89242ad39d54c3da451a93cac17b61921a"}, - {file = "bcrypt-4.1.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec3c2e1ca3e5c4b9edb94290b356d082b721f3f50758bce7cce11d8a7c89ce84"}, - {file = "bcrypt-4.1.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3a5be252fef513363fe281bafc596c31b552cf81d04c5085bc5dac29670faa08"}, - {file = "bcrypt-4.1.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:5f7cd3399fbc4ec290378b541b0cf3d4398e4737a65d0f938c7c0f9d5e686611"}, - {file = "bcrypt-4.1.3-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:c4c8d9b3e97209dd7111bf726e79f638ad9224b4691d1c7cfefa571a09b1b2d6"}, - {file = "bcrypt-4.1.3-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:31adb9cbb8737a581a843e13df22ffb7c84638342de3708a98d5c986770f2834"}, - {file = "bcrypt-4.1.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:551b320396e1d05e49cc18dd77d970accd52b322441628aca04801bbd1d52a73"}, - {file = "bcrypt-4.1.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6717543d2c110a155e6821ce5670c1f512f602eabb77dba95717ca76af79867d"}, - {file = "bcrypt-4.1.3-cp39-abi3-win32.whl", hash = "sha256:6004f5229b50f8493c49232b8e75726b568535fd300e5039e255d919fc3a07f2"}, - {file = "bcrypt-4.1.3-cp39-abi3-win_amd64.whl", hash = "sha256:2505b54afb074627111b5a8dc9b6ae69d0f01fea65c2fcaea403448c503d3991"}, - {file = "bcrypt-4.1.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:cb9c707c10bddaf9e5ba7cdb769f3e889e60b7d4fea22834b261f51ca2b89fed"}, - {file = "bcrypt-4.1.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9f8ea645eb94fb6e7bea0cf4ba121c07a3a182ac52876493870033141aa687bc"}, - {file = "bcrypt-4.1.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:f44a97780677e7ac0ca393bd7982b19dbbd8d7228c1afe10b128fd9550eef5f1"}, - {file = "bcrypt-4.1.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = 
"sha256:d84702adb8f2798d813b17d8187d27076cca3cd52fe3686bb07a9083930ce650"}, - {file = "bcrypt-4.1.3.tar.gz", hash = "sha256:2ee15dd749f5952fe3f0430d0ff6b74082e159c50332a1413d51b5689cf06623"}, + {file = "bcrypt-4.2.0-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:096a15d26ed6ce37a14c1ac1e48119660f21b24cba457f160a4b830f3fe6b5cb"}, + {file = "bcrypt-4.2.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c02d944ca89d9b1922ceb8a46460dd17df1ba37ab66feac4870f6862a1533c00"}, + {file = "bcrypt-4.2.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d84cf6d877918620b687b8fd1bf7781d11e8a0998f576c7aa939776b512b98d"}, + {file = "bcrypt-4.2.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:1bb429fedbe0249465cdd85a58e8376f31bb315e484f16e68ca4c786dcc04291"}, + {file = "bcrypt-4.2.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:655ea221910bcac76ea08aaa76df427ef8625f92e55a8ee44fbf7753dbabb328"}, + {file = "bcrypt-4.2.0-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:1ee38e858bf5d0287c39b7a1fc59eec64bbf880c7d504d3a06a96c16e14058e7"}, + {file = "bcrypt-4.2.0-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:0da52759f7f30e83f1e30a888d9163a81353ef224d82dc58eb5bb52efcabc399"}, + {file = "bcrypt-4.2.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3698393a1b1f1fd5714524193849d0c6d524d33523acca37cd28f02899285060"}, + {file = "bcrypt-4.2.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:762a2c5fb35f89606a9fde5e51392dad0cd1ab7ae64149a8b935fe8d79dd5ed7"}, + {file = "bcrypt-4.2.0-cp37-abi3-win32.whl", hash = "sha256:5a1e8aa9b28ae28020a3ac4b053117fb51c57a010b9f969603ed885f23841458"}, + {file = "bcrypt-4.2.0-cp37-abi3-win_amd64.whl", hash = "sha256:8f6ede91359e5df88d1f5c1ef47428a4420136f3ce97763e31b86dd8280fbdf5"}, + {file = "bcrypt-4.2.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:c52aac18ea1f4a4f65963ea4f9530c306b56ccd0c6f8c8da0c06976e34a6e841"}, + {file = "bcrypt-4.2.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3bbbfb2734f0e4f37c5136130405332640a1e46e6b23e000eeff2ba8d005da68"}, + {file = "bcrypt-4.2.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3413bd60460f76097ee2e0a493ccebe4a7601918219c02f503984f0a7ee0aebe"}, + {file = "bcrypt-4.2.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8d7bb9c42801035e61c109c345a28ed7e84426ae4865511eb82e913df18f58c2"}, + {file = "bcrypt-4.2.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3d3a6d28cb2305b43feac298774b997e372e56c7c7afd90a12b3dc49b189151c"}, + {file = "bcrypt-4.2.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:9c1c4ad86351339c5f320ca372dfba6cb6beb25e8efc659bedd918d921956bae"}, + {file = "bcrypt-4.2.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:27fe0f57bb5573104b5a6de5e4153c60814c711b29364c10a75a54bb6d7ff48d"}, + {file = "bcrypt-4.2.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:8ac68872c82f1add6a20bd489870c71b00ebacd2e9134a8aa3f98a0052ab4b0e"}, + {file = "bcrypt-4.2.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:cb2a8ec2bc07d3553ccebf0746bbf3d19426d1c6d1adbd4fa48925f66af7b9e8"}, + {file = "bcrypt-4.2.0-cp39-abi3-win32.whl", hash = "sha256:77800b7147c9dc905db1cba26abe31e504d8247ac73580b4aa179f98e6608f34"}, + {file = "bcrypt-4.2.0-cp39-abi3-win_amd64.whl", hash = "sha256:61ed14326ee023917ecd093ee6ef422a72f3aec6f07e21ea5f10622b735538a9"}, + {file = "bcrypt-4.2.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = 
"sha256:39e1d30c7233cfc54f5c3f2c825156fe044efdd3e0b9d309512cc514a263ec2a"}, + {file = "bcrypt-4.2.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f4f4acf526fcd1c34e7ce851147deedd4e26e6402369304220250598b26448db"}, + {file = "bcrypt-4.2.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:1ff39b78a52cf03fdf902635e4c81e544714861ba3f0efc56558979dd4f09170"}, + {file = "bcrypt-4.2.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:373db9abe198e8e2c70d12b479464e0d5092cc122b20ec504097b5f2297ed184"}, + {file = "bcrypt-4.2.0.tar.gz", hash = "sha256:cf69eaf5185fd58f268f805b505ce31f9b9fc2d64b376642164e9244540c1221"}, ] [package.extras] @@ -271,24 +131,24 @@ virtualenv = ["virtualenv (>=20.0.35)"] [[package]] name = "cachetools" -version = "5.3.3" +version = "5.5.0" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, - {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, + {file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"}, + {file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"}, ] [[package]] name = "certifi" -version = "2024.2.2" +version = "2024.7.4" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, - {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, + {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, + {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, ] [[package]] @@ -429,13 +289,13 @@ numpy = "*" [[package]] name = "chromadb" -version = "0.5.0" +version = "0.5.3" description = "Chroma." 
optional = false python-versions = ">=3.8" files = [ - {file = "chromadb-0.5.0-py3-none-any.whl", hash = "sha256:8193dc65c143b61d8faf87f02c44ecfa778d471febd70de517f51c5d88a06009"}, - {file = "chromadb-0.5.0.tar.gz", hash = "sha256:7954af614a9ff7b2902ddbd0a162f33f7ec0669e2429903905c4f7876d1f766f"}, + {file = "chromadb-0.5.3-py3-none-any.whl", hash = "sha256:b3874f08356e291c68c6d2e177db472cd51f22f3af7b9746215b748fd1e29982"}, + {file = "chromadb-0.5.3.tar.gz", hash = "sha256:05d887f56a46b2e0fc6ac5ab979503a27b9ee50d5ca9e455f83b2fb9840cd026"}, ] [package.dependencies] @@ -445,10 +305,11 @@ chroma-hnswlib = "0.7.3" fastapi = ">=0.95.2" graphlib-backport = {version = ">=1.0.3", markers = "python_version < \"3.9\""} grpcio = ">=1.58.0" +httpx = ">=0.27.0" importlib-resources = "*" kubernetes = ">=28.1.0" mmh3 = ">=4.0.1" -numpy = ">=1.22.5" +numpy = ">=1.22.5,<2.0.0" onnxruntime = ">=1.14.1" opentelemetry-api = ">=1.2.0" opentelemetry-exporter-otlp-proto-grpc = ">=1.2.0" @@ -484,13 +345,13 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} [[package]] name = "codespell" -version = "2.2.6" +version = "2.3.0" description = "Codespell" optional = false python-versions = ">=3.8" files = [ - {file = "codespell-2.2.6-py3-none-any.whl", hash = "sha256:9ee9a3e5df0990604013ac2a9f22fa8e57669c827124a2e961fe8a1da4cacc07"}, - {file = "codespell-2.2.6.tar.gz", hash = "sha256:a8c65d8eb3faa03deabab6b3bbe798bea72e1799c7e9e955d57eca4096abcff9"}, + {file = "codespell-2.3.0-py3-none-any.whl", hash = "sha256:a9c7cef2501c9cfede2110fd6d4e5e62296920efe9abfb84648df866e47f58d1"}, + {file = "codespell-2.3.0.tar.gz", hash = "sha256:360c7d10f75e65f67bad720af7007e1060a5d395670ec11a7ed1fed9dd17471f"}, ] [package.extras] @@ -527,21 +388,6 @@ humanfriendly = ">=9.1" [package.extras] cron = ["capturer (>=2.4)"] -[[package]] -name = "dataclasses-json" -version = "0.6.6" -description = "Easily serialize dataclasses to and from JSON." 
-optional = false -python-versions = "<4.0,>=3.7" -files = [ - {file = "dataclasses_json-0.6.6-py3-none-any.whl", hash = "sha256:e54c5c87497741ad454070ba0ed411523d46beb5da102e221efb873801b0ba85"}, - {file = "dataclasses_json-0.6.6.tar.gz", hash = "sha256:0c09827d26fffda27f1be2fed7a7a01a29c5ddcd2eb6393ad5ebf9d77e9deae8"}, -] - -[package.dependencies] -marshmallow = ">=3.18.0,<4.0.0" -typing-inspect = ">=0.4.0,<1" - [[package]] name = "deprecated" version = "1.2.14" @@ -559,61 +405,15 @@ wrapt = ">=1.10,<2" [package.extras] dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] -[[package]] -name = "distro" -version = "1.9.0" -description = "Distro - an OS platform information API" -optional = false -python-versions = ">=3.6" -files = [ - {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"}, - {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"}, -] - -[[package]] -name = "dnspython" -version = "2.6.1" -description = "DNS toolkit" -optional = false -python-versions = ">=3.8" -files = [ - {file = "dnspython-2.6.1-py3-none-any.whl", hash = "sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50"}, - {file = "dnspython-2.6.1.tar.gz", hash = "sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc"}, -] - -[package.extras] -dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "sphinx (>=7.2.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"] -dnssec = ["cryptography (>=41)"] -doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"] -doq = ["aioquic (>=0.9.25)"] -idna = ["idna (>=3.6)"] -trio = ["trio (>=0.23)"] -wmi = ["wmi (>=1.5.1)"] - -[[package]] -name = "email-validator" -version = "2.1.1" -description = "A robust email address syntax and deliverability validation library." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "email_validator-2.1.1-py3-none-any.whl", hash = "sha256:97d882d174e2a65732fb43bfce81a3a834cbc1bde8bf419e30ef5ea976370a05"}, - {file = "email_validator-2.1.1.tar.gz", hash = "sha256:200a70680ba08904be6d1eef729205cc0d687634399a5924d842533efb824b84"}, -] - -[package.dependencies] -dnspython = ">=2.0.0" -idna = ">=2.0.0" - [[package]] name = "exceptiongroup" -version = "1.2.1" +version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, - {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, ] [package.extras] @@ -621,61 +421,38 @@ test = ["pytest (>=6)"] [[package]] name = "fastapi" -version = "0.111.0" +version = "0.112.1" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" optional = false python-versions = ">=3.8" files = [ - {file = "fastapi-0.111.0-py3-none-any.whl", hash = "sha256:97ecbf994be0bcbdadedf88c3150252bed7b2087075ac99735403b1b76cc8fc0"}, - {file = "fastapi-0.111.0.tar.gz", hash = "sha256:b9db9dd147c91cb8b769f7183535773d8741dd46f9dc6676cd82eab510228cd7"}, + {file = "fastapi-0.112.1-py3-none-any.whl", hash = "sha256:bcbd45817fc2a1cd5da09af66815b84ec0d3d634eb173d1ab468ae3103e183e4"}, + {file = "fastapi-0.112.1.tar.gz", hash = "sha256:b2537146f8c23389a7faa8b03d0bd38d4986e6983874557d95eed2acc46448ef"}, ] [package.dependencies] -email_validator = ">=2.0.0" -fastapi-cli = ">=0.0.2" -httpx = ">=0.23.0" -jinja2 = ">=2.11.2" -orjson = ">=3.2.1" pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" -python-multipart = ">=0.0.7" -starlette = ">=0.37.2,<0.38.0" +starlette = ">=0.37.2,<0.39.0" typing-extensions = ">=4.8.0" -ujson = ">=4.0.1,<4.0.2 || >4.0.2,<4.1.0 || >4.1.0,<4.2.0 || >4.2.0,<4.3.0 || >4.3.0,<5.0.0 || >5.0.0,<5.1.0 || >5.1.0" -uvicorn = {version = ">=0.12.0", extras = ["standard"]} [package.extras] -all = ["email_validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] - -[[package]] -name = "fastapi-cli" -version = "0.0.3" -description = "Run and manage FastAPI apps from the command line with FastAPI CLI. 
🚀" -optional = false -python-versions = ">=3.8" -files = [ - {file = "fastapi_cli-0.0.3-py3-none-any.whl", hash = "sha256:ae233115f729945479044917d949095e829d2d84f56f55ce1ca17627872825a5"}, - {file = "fastapi_cli-0.0.3.tar.gz", hash = "sha256:3b6e4d2c4daee940fb8db59ebbfd60a72c4b962bcf593e263e4cc69da4ea3d7f"}, -] - -[package.dependencies] -fastapi = "*" -typer = ">=0.12.3" -uvicorn = {version = ">=0.15.0", extras = ["standard"]} +all = ["email_validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] +standard = ["email_validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "httpx (>=0.23.0)", "jinja2 (>=2.11.2)", "python-multipart (>=0.0.7)", "uvicorn[standard] (>=0.12.0)"] [[package]] name = "filelock" -version = "3.14.0" +version = "3.15.4" description = "A platform independent file lock." optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.14.0-py3-none-any.whl", hash = "sha256:43339835842f110ca7ae60f1e1c160714c5a6afd15a2873419ab185334975c0f"}, - {file = "filelock-3.14.0.tar.gz", hash = "sha256:6ea72da3be9b8c82afd3edcf99f2fffbb5076335a5ae4d03248bb5b6c3eae78a"}, + {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"}, + {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"}, ] [package.extras] docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] typing = ["typing-extensions (>=4.8)"] [[package]] @@ -703,101 +480,15 @@ files = [ [package.dependencies] python-dateutil = ">=2.7" -[[package]] -name = "frozenlist" -version = "1.4.1" -description = "A list-like structure which implements collections.abc.MutableSequence" -optional = false -python-versions = ">=3.8" -files = [ - {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, - {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, - {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"}, - {file = 
"frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"}, - {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"}, - {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, 
- {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, - {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"}, - {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"}, - {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"}, - {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"}, - {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"}, - {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"}, - {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"}, - {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, - {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, -] - [[package]] name = "fsspec" -version = "2024.5.0" +version = "2024.6.1" description = "File-system specification" optional = false python-versions = ">=3.8" files = [ - {file = "fsspec-2024.5.0-py3-none-any.whl", hash = "sha256:e0fdbc446d67e182f49a70b82cf7889028a63588fde6b222521f10937b2b670c"}, - {file = "fsspec-2024.5.0.tar.gz", hash = "sha256:1d021b0b0f933e3b3029ed808eb400c08ba101ca2de4b3483fbc9ca23fcee94a"}, + {file = "fsspec-2024.6.1-py3-none-any.whl", hash = "sha256:3cb443f8bcd2efb31295a5b9fdb02aee81d8452c80d28f97a6d0959e6cee101e"}, + {file = "fsspec-2024.6.1.tar.gz", hash = "sha256:fad7d7e209dd4c1208e3bbfda706620e0da5142bebbd9c384afb95b07e798e49"}, ] [package.extras] @@ -806,6 +497,7 @@ adl = ["adlfs"] arrow = ["pyarrow (>=1)"] dask = ["dask", "distributed"] dev = ["pre-commit", "ruff"] +doc = ["numpydoc", "sphinx", "sphinx-design", "sphinx-rtd-theme", "yarl"] dropbox = ["dropbox", "dropboxdrivefs", "requests"] full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs", "smbprotocol", "tqdm"] fuse = ["fusepy"] @@ -829,13 +521,13 @@ tqdm = ["tqdm"] [[package]] name = "google-auth" -version = "2.29.0" +version = "2.34.0" description = "Google Authentication Library" optional = false python-versions = ">=3.7" files = [ - {file = "google-auth-2.29.0.tar.gz", hash = "sha256:672dff332d073227550ffc7457868ac4218d6c500b155fe6cc17d2b13602c360"}, - {file = "google_auth-2.29.0-py2.py3-none-any.whl", hash = "sha256:d452ad095688cd52bae0ad6fafe027f6a6d6f560e810fec20914e17a09526415"}, + {file = "google_auth-2.34.0-py2.py3-none-any.whl", hash = "sha256:72fd4733b80b6d777dcde515628a9eb4a577339437012874ea286bca7261ee65"}, + {file = "google_auth-2.34.0.tar.gz", hash = "sha256:8eb87396435c19b20d32abd2f984e31c191a15284af72eb922f10e5bde9c04cc"}, ] [package.dependencies] @@ -845,24 +537,24 @@ rsa = ">=3.1.4,<5" [package.extras] aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] -enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] +enterprise-cert = ["cryptography", "pyopenssl"] pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] reauth = ["pyu2f (>=0.1.5)"] requests = ["requests (>=2.20.0,<3.0.0.dev0)"] [[package]] name = "googleapis-common-protos" -version = "1.63.0" +version = "1.63.2" description = "Common protobufs used in Google APIs" optional = false python-versions = ">=3.7" files = [ - {file = "googleapis-common-protos-1.63.0.tar.gz", hash = "sha256:17ad01b11d5f1d0171c06d3ba5c04c54474e883b66b949722b4938ee2694ef4e"}, - {file = "googleapis_common_protos-1.63.0-py2.py3-none-any.whl", hash = "sha256:ae45f75702f7c08b541f750854a678bd8f534a1a6bace6afe975f1d0a82d6632"}, + {file = "googleapis-common-protos-1.63.2.tar.gz", hash = "sha256:27c5abdffc4911f28101e635de1533fb4cfd2c37fbaa9174587c799fac90aa87"}, + {file 
= "googleapis_common_protos-1.63.2-py2.py3-none-any.whl", hash = "sha256:27a2499c7e8aff199665b22741997e485eccc8645aa9176c7c988e6fae507945"}, ] [package.dependencies] -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" +protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" [package.extras] grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] @@ -878,134 +570,63 @@ files = [ {file = "graphlib_backport-1.1.0.tar.gz", hash = "sha256:00a7888b21e5393064a133209cb5d3b3ef0a2096cf023914c9d778dff5644125"}, ] -[[package]] -name = "greenlet" -version = "3.0.3" -description = "Lightweight in-process concurrent programming" -optional = false -python-versions = ">=3.7" -files = [ - {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, - {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, - {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, - {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, - {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, - {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, - {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, - {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, - {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, - {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, - {file = 
"greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, - {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, - {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, - {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, - {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, - {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, - {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, -] - -[package.extras] -docs = ["Sphinx", "furo"] -test = ["objgraph", "psutil"] - [[package]] name = "grpcio" -version = "1.63.0" +version = "1.65.5" description = "HTTP/2-based RPC framework" optional = false python-versions = ">=3.8" files = [ - {file = "grpcio-1.63.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:2e93aca840c29d4ab5db93f94ed0a0ca899e241f2e8aec6334ab3575dc46125c"}, - {file = "grpcio-1.63.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:91b73d3f1340fefa1e1716c8c1ec9930c676d6b10a3513ab6c26004cb02d8b3f"}, - {file = "grpcio-1.63.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = 
"sha256:b3afbd9d6827fa6f475a4f91db55e441113f6d3eb9b7ebb8fb806e5bb6d6bd0d"}, - {file = "grpcio-1.63.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8f3f6883ce54a7a5f47db43289a0a4c776487912de1a0e2cc83fdaec9685cc9f"}, - {file = "grpcio-1.63.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf8dae9cc0412cb86c8de5a8f3be395c5119a370f3ce2e69c8b7d46bb9872c8d"}, - {file = "grpcio-1.63.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:08e1559fd3b3b4468486b26b0af64a3904a8dbc78d8d936af9c1cf9636eb3e8b"}, - {file = "grpcio-1.63.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5c039ef01516039fa39da8a8a43a95b64e288f79f42a17e6c2904a02a319b357"}, - {file = "grpcio-1.63.0-cp310-cp310-win32.whl", hash = "sha256:ad2ac8903b2eae071055a927ef74121ed52d69468e91d9bcbd028bd0e554be6d"}, - {file = "grpcio-1.63.0-cp310-cp310-win_amd64.whl", hash = "sha256:b2e44f59316716532a993ca2966636df6fbe7be4ab6f099de6815570ebe4383a"}, - {file = "grpcio-1.63.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:f28f8b2db7b86c77916829d64ab21ff49a9d8289ea1564a2b2a3a8ed9ffcccd3"}, - {file = "grpcio-1.63.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:65bf975639a1f93bee63ca60d2e4951f1b543f498d581869922910a476ead2f5"}, - {file = "grpcio-1.63.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:b5194775fec7dc3dbd6a935102bb156cd2c35efe1685b0a46c67b927c74f0cfb"}, - {file = "grpcio-1.63.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4cbb2100ee46d024c45920d16e888ee5d3cf47c66e316210bc236d5bebc42b3"}, - {file = "grpcio-1.63.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ff737cf29b5b801619f10e59b581869e32f400159e8b12d7a97e7e3bdeee6a2"}, - {file = "grpcio-1.63.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cd1e68776262dd44dedd7381b1a0ad09d9930ffb405f737d64f505eb7f77d6c7"}, - {file = "grpcio-1.63.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:93f45f27f516548e23e4ec3fbab21b060416007dbe768a111fc4611464cc773f"}, - {file = "grpcio-1.63.0-cp311-cp311-win32.whl", hash = "sha256:878b1d88d0137df60e6b09b74cdb73db123f9579232c8456f53e9abc4f62eb3c"}, - {file = "grpcio-1.63.0-cp311-cp311-win_amd64.whl", hash = "sha256:756fed02dacd24e8f488f295a913f250b56b98fb793f41d5b2de6c44fb762434"}, - {file = "grpcio-1.63.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:93a46794cc96c3a674cdfb59ef9ce84d46185fe9421baf2268ccb556f8f81f57"}, - {file = "grpcio-1.63.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:a7b19dfc74d0be7032ca1eda0ed545e582ee46cd65c162f9e9fc6b26ef827dc6"}, - {file = "grpcio-1.63.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:8064d986d3a64ba21e498b9a376cbc5d6ab2e8ab0e288d39f266f0fca169b90d"}, - {file = "grpcio-1.63.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:219bb1848cd2c90348c79ed0a6b0ea51866bc7e72fa6e205e459fedab5770172"}, - {file = "grpcio-1.63.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2d60cd1d58817bc5985fae6168d8b5655c4981d448d0f5b6194bbcc038090d2"}, - {file = "grpcio-1.63.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9e350cb096e5c67832e9b6e018cf8a0d2a53b2a958f6251615173165269a91b0"}, - {file = "grpcio-1.63.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:56cdf96ff82e3cc90dbe8bac260352993f23e8e256e063c327b6cf9c88daf7a9"}, - {file = "grpcio-1.63.0-cp312-cp312-win32.whl", hash = "sha256:3a6d1f9ea965e750db7b4ee6f9fdef5fdf135abe8a249e75d84b0a3e0c668a1b"}, - {file = "grpcio-1.63.0-cp312-cp312-win_amd64.whl", hash 
= "sha256:d2497769895bb03efe3187fb1888fc20e98a5f18b3d14b606167dacda5789434"}, - {file = "grpcio-1.63.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:fdf348ae69c6ff484402cfdb14e18c1b0054ac2420079d575c53a60b9b2853ae"}, - {file = "grpcio-1.63.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a3abfe0b0f6798dedd2e9e92e881d9acd0fdb62ae27dcbbfa7654a57e24060c0"}, - {file = "grpcio-1.63.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:6ef0ad92873672a2a3767cb827b64741c363ebaa27e7f21659e4e31f4d750280"}, - {file = "grpcio-1.63.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b416252ac5588d9dfb8a30a191451adbf534e9ce5f56bb02cd193f12d8845b7f"}, - {file = "grpcio-1.63.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3b77eaefc74d7eb861d3ffbdf91b50a1bb1639514ebe764c47773b833fa2d91"}, - {file = "grpcio-1.63.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b005292369d9c1f80bf70c1db1c17c6c342da7576f1c689e8eee4fb0c256af85"}, - {file = "grpcio-1.63.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cdcda1156dcc41e042d1e899ba1f5c2e9f3cd7625b3d6ebfa619806a4c1aadda"}, - {file = "grpcio-1.63.0-cp38-cp38-win32.whl", hash = "sha256:01799e8649f9e94ba7db1aeb3452188048b0019dc37696b0f5ce212c87c560c3"}, - {file = "grpcio-1.63.0-cp38-cp38-win_amd64.whl", hash = "sha256:6a1a3642d76f887aa4009d92f71eb37809abceb3b7b5a1eec9c554a246f20e3a"}, - {file = "grpcio-1.63.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:75f701ff645858a2b16bc8c9fc68af215a8bb2d5a9b647448129de6e85d52bce"}, - {file = "grpcio-1.63.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cacdef0348a08e475a721967f48206a2254a1b26ee7637638d9e081761a5ba86"}, - {file = "grpcio-1.63.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:0697563d1d84d6985e40ec5ec596ff41b52abb3fd91ec240e8cb44a63b895094"}, - {file = "grpcio-1.63.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6426e1fb92d006e47476d42b8f240c1d916a6d4423c5258ccc5b105e43438f61"}, - {file = "grpcio-1.63.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e48cee31bc5f5a31fb2f3b573764bd563aaa5472342860edcc7039525b53e46a"}, - {file = "grpcio-1.63.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:50344663068041b34a992c19c600236e7abb42d6ec32567916b87b4c8b8833b3"}, - {file = "grpcio-1.63.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:259e11932230d70ef24a21b9fb5bb947eb4703f57865a404054400ee92f42f5d"}, - {file = "grpcio-1.63.0-cp39-cp39-win32.whl", hash = "sha256:a44624aad77bf8ca198c55af811fd28f2b3eaf0a50ec5b57b06c034416ef2d0a"}, - {file = "grpcio-1.63.0-cp39-cp39-win_amd64.whl", hash = "sha256:166e5c460e5d7d4656ff9e63b13e1f6029b122104c1633d5f37eaea348d7356d"}, - {file = "grpcio-1.63.0.tar.gz", hash = "sha256:f3023e14805c61bc439fb40ca545ac3d5740ce66120a678a3c6c2c55b70343d1"}, + {file = "grpcio-1.65.5-cp310-cp310-linux_armv7l.whl", hash = "sha256:b67d450f1e008fedcd81e097a3a400a711d8be1a8b20f852a7b8a73fead50fe3"}, + {file = "grpcio-1.65.5-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:a70a20eed87bba647a38bedd93b3ce7db64b3f0e8e0952315237f7f5ca97b02d"}, + {file = "grpcio-1.65.5-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:f79c87c114bf37adf408026b9e2e333fe9ff31dfc9648f6f80776c513145c813"}, + {file = "grpcio-1.65.5-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f17f9fa2d947dbfaca01b3ab2c62eefa8240131fdc67b924eb42ce6032e3e5c1"}, + {file = "grpcio-1.65.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:32d60e18ff7c34fe3f6db3d35ad5c6dc99f5b43ff3982cb26fad4174462d10b1"}, + {file = "grpcio-1.65.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fe6505376f5b00bb008e4e1418152e3ad3d954b629da286c7913ff3cfc0ff740"}, + {file = "grpcio-1.65.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:33158e56c6378063923c417e9fbdb28660b6e0e2835af42e67f5a7793f587af7"}, + {file = "grpcio-1.65.5-cp310-cp310-win32.whl", hash = "sha256:1cbc208edb9acf1cc339396a1a36b83796939be52f34e591c90292045b579fbf"}, + {file = "grpcio-1.65.5-cp310-cp310-win_amd64.whl", hash = "sha256:bc74f3f745c37e2c5685c9d2a2d5a94de00f286963f5213f763ae137bf4f2358"}, + {file = "grpcio-1.65.5-cp311-cp311-linux_armv7l.whl", hash = "sha256:3207ae60d07e5282c134b6e02f9271a2cb523c6d7a346c6315211fe2bf8d61ed"}, + {file = "grpcio-1.65.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a2f80510f99f82d4eb825849c486df703f50652cea21c189eacc2b84f2bde764"}, + {file = "grpcio-1.65.5-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:a80e9a5e3f93c54f5eb82a3825ea1fc4965b2fa0026db2abfecb139a5c4ecdf1"}, + {file = "grpcio-1.65.5-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b2944390a496567de9e70418f3742b477d85d8ca065afa90432edc91b4bb8ad"}, + {file = "grpcio-1.65.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3655139d7be213c32c79ef6fb2367cae28e56ef68e39b1961c43214b457f257"}, + {file = "grpcio-1.65.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:05f02d68fc720e085f061b704ee653b181e6d5abfe315daef085719728d3d1fd"}, + {file = "grpcio-1.65.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1c4caafe71aef4dabf53274bbf4affd6df651e9f80beedd6b8e08ff438ed3260"}, + {file = "grpcio-1.65.5-cp311-cp311-win32.whl", hash = "sha256:84c901cdec16a092099f251ef3360d15e29ef59772150fa261d94573612539b5"}, + {file = "grpcio-1.65.5-cp311-cp311-win_amd64.whl", hash = "sha256:11f8b16121768c1cb99d7dcb84e01510e60e6a206bf9123e134118802486f035"}, + {file = "grpcio-1.65.5-cp312-cp312-linux_armv7l.whl", hash = "sha256:ee6ed64a27588a2c94e8fa84fe8f3b5c89427d4d69c37690903d428ec61ca7e4"}, + {file = "grpcio-1.65.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:76991b7a6fb98630a3328839755181ce7c1aa2b1842aa085fd4198f0e5198960"}, + {file = "grpcio-1.65.5-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:89c00a18801b1ed9cc441e29b521c354725d4af38c127981f2c950c796a09b6e"}, + {file = "grpcio-1.65.5-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:078038e150a897e5e402ed3d57f1d31ebf604cbed80f595bd281b5da40762a92"}, + {file = "grpcio-1.65.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c97962720489ef31b5ad8a916e22bc31bba3664e063fb9f6702dce056d4aa61b"}, + {file = "grpcio-1.65.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:b8270b15b99781461b244f5c81d5c2bc9696ab9189fb5ff86c841417fb3b39fe"}, + {file = "grpcio-1.65.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8e5c4c15ac3fe1eb68e46bc51e66ad29be887479f231f8237cf8416058bf0cc1"}, + {file = "grpcio-1.65.5-cp312-cp312-win32.whl", hash = "sha256:f5b5970341359341d0e4c789da7568264b2a89cd976c05ea476036852b5950cd"}, + {file = "grpcio-1.65.5-cp312-cp312-win_amd64.whl", hash = "sha256:238a625f391a1b9f5f069bdc5930f4fd71b74426bea52196fc7b83f51fa97d34"}, + {file = "grpcio-1.65.5-cp38-cp38-linux_armv7l.whl", hash = "sha256:6c4e62bcf297a1568f627f39576dbfc27f1e5338a691c6dd5dd6b3979da51d1c"}, + {file = "grpcio-1.65.5-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:d7df567b67d16d4177835a68d3f767bbcbad04da9dfb52cbd19171f430c898bd"}, + {file = "grpcio-1.65.5-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:b7ca419f1462390851eec395b2089aad1e49546b52d4e2c972ceb76da69b10f8"}, + {file = "grpcio-1.65.5-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fa36dd8496d3af0d40165252a669fa4f6fd2db4b4026b9a9411cbf060b9d6a15"}, + {file = "grpcio-1.65.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a101696f9ece90a0829988ff72f1b1ea2358f3df035bdf6d675dd8b60c2c0894"}, + {file = "grpcio-1.65.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2a6d8169812932feac514b420daffae8ab8e36f90f3122b94ae767e633296b17"}, + {file = "grpcio-1.65.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:47d0aaaab82823f0aa6adea5184350b46e2252e13a42a942db84da5b733f2e05"}, + {file = "grpcio-1.65.5-cp38-cp38-win32.whl", hash = "sha256:85ae8f8517d5bcc21fb07dbf791e94ed84cc28f84c903cdc2bd7eaeb437c8f45"}, + {file = "grpcio-1.65.5-cp38-cp38-win_amd64.whl", hash = "sha256:770bd4bd721961f6dd8049bc27338564ba8739913f77c0f381a9815e465ff965"}, + {file = "grpcio-1.65.5-cp39-cp39-linux_armv7l.whl", hash = "sha256:ab5ec837d8cee8dbce9ef6386125f119b231e4333cc6b6d57b6c5c7c82a72331"}, + {file = "grpcio-1.65.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cabd706183ee08d8026a015af5819a0b3a8959bdc9d1f6fdacd1810f09200f2a"}, + {file = "grpcio-1.65.5-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:ec71fc5b39821ad7d80db7473c8f8c2910f3382f0ddadfbcfc2c6c437107eb67"}, + {file = "grpcio-1.65.5-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3a9e35bcb045e39d7cac30464c285389b9a816ac2067e4884ad2c02e709ef8e"}, + {file = "grpcio-1.65.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d750e9330eb14236ca11b78d0c494eed13d6a95eb55472298f0e547c165ee324"}, + {file = "grpcio-1.65.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2b91ce647b6307f25650872454a4d02a2801f26a475f90d0b91ed8110baae589"}, + {file = "grpcio-1.65.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8da58ff80bc4556cf29bc03f5fff1f03b8387d6aaa7b852af9eb65b2cf833be4"}, + {file = "grpcio-1.65.5-cp39-cp39-win32.whl", hash = "sha256:7a412959aa5f08c5ac04aa7b7c3c041f5e4298cadd4fcc2acff195b56d185ebc"}, + {file = "grpcio-1.65.5-cp39-cp39-win_amd64.whl", hash = "sha256:55714ea852396ec9568f45f487639945ab674de83c12bea19d5ddbc3ae41ada3"}, + {file = "grpcio-1.65.5.tar.gz", hash = "sha256:ec6f219fb5d677a522b0deaf43cea6697b16f338cb68d009e30930c4aa0d2209"}, ] [package.extras] -protobuf = ["grpcio-tools (>=1.63.0)"] +protobuf = ["grpcio-tools (>=1.65.5)"] [[package]] name = "h11" @@ -1113,13 +734,13 @@ socks = ["socksio (==1.*)"] [[package]] name = "huggingface-hub" -version = "0.23.0" +version = "0.24.6" description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" optional = false python-versions = ">=3.8.0" files = [ - {file = "huggingface_hub-0.23.0-py3-none-any.whl", hash = "sha256:075c30d48ee7db2bba779190dc526d2c11d422aed6f9044c5e2fdc2c432fdb91"}, - {file = "huggingface_hub-0.23.0.tar.gz", hash = "sha256:7126dedd10a4c6fac796ced4d87a8cf004efc722a5125c2c09299017fa366fa9"}, + {file = "huggingface_hub-0.24.6-py3-none-any.whl", hash = "sha256:a990f3232aa985fe749bc9474060cbad75e8b2f115f6665a9fda5b9c97818970"}, + {file = "huggingface_hub-0.24.6.tar.gz", hash = "sha256:cc2579e761d070713eaa9c323e3debe39d5b464ae3a7261c39a9195b27bb8000"}, ] [package.dependencies] @@ -1132,17 +753,17 @@ tqdm = ">=4.42.1" 
typing-extensions = ">=3.7.4.3" [package.extras] -all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio", "jedi", "minijinja (>=1.0)", "mypy (==1.5.1)", "numpy", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.3.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] +all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio", "jedi", "minijinja (>=1.0)", "mypy (==1.5.1)", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.5.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] cli = ["InquirerPy (==0.3.4)"] -dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio", "jedi", "minijinja (>=1.0)", "mypy (==1.5.1)", "numpy", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.3.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] +dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio", "jedi", "minijinja (>=1.0)", "mypy (==1.5.1)", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.5.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"] hf-transfer = ["hf-transfer (>=0.1.4)"] inference = ["aiohttp", "minijinja (>=1.0)"] -quality = ["mypy (==1.5.1)", "ruff (>=0.3.0)"] +quality = ["mypy (==1.5.1)", "ruff (>=0.5.0)"] tensorflow = ["graphviz", "pydot", "tensorflow"] tensorflow-testing = ["keras (<3.0)", "tensorflow"] -testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio", "jedi", "minijinja (>=1.0)", "numpy", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"] -torch = ["safetensors", "torch"] +testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio", "jedi", "minijinja (>=1.0)", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"] +torch = ["safetensors[torch]", "torch"] typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)"] [[package]] @@ -1172,40 +793,40 @@ files = [ [[package]] name = "importlib-metadata" -version = "7.0.0" +version = "8.0.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-7.0.0-py3-none-any.whl", hash = "sha256:d97503976bb81f40a193d41ee6570868479c69d5068651eb039c40d850c59d67"}, - {file = "importlib_metadata-7.0.0.tar.gz", hash = "sha256:7fc841f8b8332803464e5dc1c63a2e59121f46ca186c0e2e182e80bf8c1319f7"}, + {file = "importlib_metadata-8.0.0-py3-none-any.whl", hash = 
"sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f"}, + {file = "importlib_metadata-8.0.0.tar.gz", hash = "sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812"}, ] [package.dependencies] zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] [[package]] name = "importlib-resources" -version = "6.4.0" +version = "6.4.3" description = "Read resources from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_resources-6.4.0-py3-none-any.whl", hash = "sha256:50d10f043df931902d4194ea07ec57960f66a80449ff867bfe782b4c486ba78c"}, - {file = "importlib_resources-6.4.0.tar.gz", hash = "sha256:cdb2b453b8046ca4e3798eb1d84f3cce1446a0e8e7b5ef4efb600f19fc398145"}, + {file = "importlib_resources-6.4.3-py3-none-any.whl", hash = "sha256:2d6dfe3b9e055f72495c2085890837fc8c758984e209115c8792bddcb762cd93"}, + {file = "importlib_resources-6.4.3.tar.gz", hash = "sha256:4a202b9b9d38563b46da59221d77bb73862ab5d79d461307bcb826d725448b98"}, ] [package.dependencies] zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["jaraco.test (>=5.4)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +test = ["jaraco.test (>=5.4)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"] [[package]] name = "iniconfig" @@ -1218,23 +839,6 @@ files = [ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] -[[package]] -name = "jinja2" -version = "3.1.4" -description = "A very fast and expressive template engine." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, - {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, -] - -[package.dependencies] -MarkupSafe = ">=2.0" - -[package.extras] -i18n = ["Babel (>=2.7)"] - [[package]] name = "jsonpatch" version = "1.33" @@ -1251,24 +855,24 @@ jsonpointer = ">=1.9" [[package]] name = "jsonpointer" -version = "2.4" +version = "3.0.0" description = "Identify specific nodes in a JSON document (RFC 6901)" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +python-versions = ">=3.7" files = [ - {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"}, - {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"}, + {file = "jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942"}, + {file = "jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef"}, ] [[package]] name = "kubernetes" -version = "29.0.0" +version = "30.1.0" description = "Kubernetes python client" optional = false python-versions = ">=3.6" files = [ - {file = "kubernetes-29.0.0-py2.py3-none-any.whl", hash = "sha256:ab8cb0e0576ccdfb71886366efb102c6a20f268d817be065ce7f9909c631e43e"}, - {file = "kubernetes-29.0.0.tar.gz", hash = "sha256:c4812e227ae74d07d53c88293e564e54b850452715a59a927e7e1bc6b9a60459"}, + {file = "kubernetes-30.1.0-py2.py3-none-any.whl", hash = "sha256:e212e8b7579031dd2e512168b617373bc1e03888d41ac4e04039240a292d478d"}, + {file = "kubernetes-30.1.0.tar.gz", hash = "sha256:41e4c77af9f28e7a6c314e3bd06a8c6229ddd787cad684e0ab9f69b498e98ebc"}, ] [package.dependencies] @@ -1286,66 +890,9 @@ websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.dev0 || >=0.43.dev0" [package.extras] adal = ["adal (>=1.0.2)"] -[[package]] -name = "langchain" -version = "0.2.6" -description = "Building applications with LLMs through composability" -optional = false -python-versions = ">=3.8.1,<4.0" -files = [] -develop = true - -[package.dependencies] -aiohttp = "^3.8.3" -async-timeout = {version = "^4.0.0", markers = "python_version < \"3.11\""} -langchain-core = "^0.2.10" -langchain-text-splitters = "^0.2.0" -langsmith = "^0.1.17" -numpy = [ - {version = ">=1,<2", markers = "python_version < \"3.12\""}, - {version = ">=1.26.0,<2.0.0", markers = "python_version >= \"3.12\""}, -] -pydantic = ">=1,<3" -PyYAML = ">=5.3" -requests = "^2" -SQLAlchemy = ">=1.4,<3" -tenacity = "^8.1.0,!=8.4.0" - -[package.source] -type = "directory" -url = "../../langchain" - -[[package]] -name = "langchain-community" -version = "0.2.6" -description = "Community contributed LangChain integrations." 
-optional = false -python-versions = ">=3.8.1,<4.0" -files = [] -develop = true - -[package.dependencies] -aiohttp = "^3.8.3" -dataclasses-json = ">= 0.5.7, < 0.7" -langchain = "^0.2.6" -langchain-core = "^0.2.10" -langsmith = "^0.1.0" -numpy = [ - {version = ">=1,<2", markers = "python_version < \"3.12\""}, - {version = ">=1.26.0,<2.0.0", markers = "python_version >= \"3.12\""}, -] -PyYAML = ">=5.3" -requests = "^2" -SQLAlchemy = ">=1.4,<3" -tenacity = "^8.1.0,!=8.4.0" - -[package.source] -type = "directory" -url = "../../community" - [[package]] name = "langchain-core" -version = "0.2.11" +version = "0.2.33" description = "Building applications with LLMs through composability" optional = false python-versions = ">=3.8.1,<4.0" @@ -1362,59 +909,30 @@ pydantic = [ ] PyYAML = ">=5.3" tenacity = "^8.1.0,!=8.4.0" +typing-extensions = ">=4.7" [package.source] type = "directory" url = "../../core" -[[package]] -name = "langchain-openai" -version = "0.1.13" -description = "An integration package connecting OpenAI and LangChain" -optional = false -python-versions = ">=3.8.1,<4.0" -files = [] -develop = true - -[package.dependencies] -langchain-core = ">=0.2.2,<0.3" -openai = "^1.32.0" -tiktoken = ">=0.7,<1" - -[package.source] -type = "directory" -url = "../openai" - -[[package]] -name = "langchain-text-splitters" -version = "0.2.2" -description = "LangChain text splitting utilities" -optional = false -python-versions = ">=3.8.1,<4.0" -files = [] -develop = true - -[package.dependencies] -langchain-core = "^0.2.10" - -[package.source] -type = "directory" -url = "../../text-splitters" - [[package]] name = "langsmith" -version = "0.1.77" +version = "0.1.100" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langsmith-0.1.77-py3-none-any.whl", hash = "sha256:2202cc21b1ed7e7b9e5d2af2694be28898afa048c09fdf09f620cbd9301755ae"}, - {file = "langsmith-0.1.77.tar.gz", hash = "sha256:4ace09077a9a4e412afeb4b517ca68e7de7b07f36e4792dc8236ac5207c0c0c7"}, + {file = "langsmith-0.1.100-py3-none-any.whl", hash = "sha256:cae44a884a4166c4d8b9cc5ff99f5d520337bd90b9dadfe3706ed31415d559a7"}, + {file = "langsmith-0.1.100.tar.gz", hash = "sha256:20ff0126253a5a1d621635a3bc44ccacc036e855f52185ae983420f14eb6c605"}, ] [package.dependencies] +httpx = ">=0.23.0,<1" orjson = ">=3.9.14,<4.0.0" -pydantic = ">=1,<3" +pydantic = [ + {version = ">=1,<3", markers = "python_full_version < \"3.12.4\""}, + {version = ">=2.7.4,<3.0.0", markers = "python_full_version >= \"3.12.4\""}, +] requests = ">=2,<3" [[package]] @@ -1441,94 +959,6 @@ profiling = ["gprof2dot"] rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] -[[package]] -name = "markupsafe" -version = "2.1.5" -description = "Safely add untrusted strings to HTML/XML markup." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, - {file = 
"MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = 
"MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, - {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, -] - -[[package]] -name = "marshmallow" -version = "3.21.2" -description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "marshmallow-3.21.2-py3-none-any.whl", hash = "sha256:70b54a6282f4704d12c0a41599682c5c5450e843b9ec406308653b47c59648a1"}, - {file = "marshmallow-3.21.2.tar.gz", hash = "sha256:82408deadd8b33d56338d2182d455db632c6313aa2af61916672146bb32edc56"}, -] - -[package.dependencies] -packaging = ">=17.0" - -[package.extras] -dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"] -docs = ["alabaster (==0.7.16)", "autodocsumm (==0.2.12)", "sphinx (==7.3.7)", "sphinx-issues (==4.1.0)", "sphinx-version-warning (==1.1.2)"] -tests = ["pytest", "pytz", "simplejson"] - [[package]] name = "mdurl" version = "0.1.2" @@ -1660,145 +1090,46 @@ docs = ["sphinx"] gmpy = ["gmpy2 (>=2.1.0a4)"] tests = ["pytest (>=4.6)"] -[[package]] -name = "multidict" -version = "6.0.5" -description = "multidict implementation" -optional = false -python-versions = ">=3.7" -files = [ - {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, - {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, - {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, - {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, - {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, - {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, - {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, - {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, - {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, - {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"}, - {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"}, - {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"}, - {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"}, - {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, - {file = 
"multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, - {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, - {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, - {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, - {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, -] - [[package]] name = "mypy" -version = "1.10.0" +version = "1.11.1" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:da1cbf08fb3b851ab3b9523a884c232774008267b1f83371ace57f412fe308c2"}, - {file = "mypy-1.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:12b6bfc1b1a66095ab413160a6e520e1dc076a28f3e22f7fb25ba3b000b4ef99"}, - {file = "mypy-1.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e36fb078cce9904c7989b9693e41cb9711e0600139ce3970c6ef814b6ebc2b2"}, - {file = "mypy-1.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2b0695d605ddcd3eb2f736cd8b4e388288c21e7de85001e9f85df9187f2b50f9"}, - {file = "mypy-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:cd777b780312ddb135bceb9bc8722a73ec95e042f911cc279e2ec3c667076051"}, - {file = "mypy-1.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3be66771aa5c97602f382230165b856c231d1277c511c9a8dd058be4784472e1"}, - {file = "mypy-1.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8b2cbaca148d0754a54d44121b5825ae71868c7592a53b7292eeb0f3fdae95ee"}, - {file = "mypy-1.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ec404a7cbe9fc0e92cb0e67f55ce0c025014e26d33e54d9e506a0f2d07fe5de"}, - {file = "mypy-1.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e22e1527dc3d4aa94311d246b59e47f6455b8729f4968765ac1eacf9a4760bc7"}, - {file = "mypy-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:a87dbfa85971e8d59c9cc1fcf534efe664d8949e4c0b6b44e8ca548e746a8d53"}, - {file = "mypy-1.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a781f6ad4bab20eef8b65174a57e5203f4be627b46291f4589879bf4e257b97b"}, - {file = "mypy-1.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b808e12113505b97d9023b0b5e0c0705a90571c6feefc6f215c1df9381256e30"}, - {file = "mypy-1.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f55583b12156c399dce2df7d16f8a5095291354f1e839c252ec6c0611e86e2e"}, - {file = "mypy-1.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4cf18f9d0efa1b16478c4c129eabec36148032575391095f73cae2e722fcf9d5"}, - {file = "mypy-1.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:bc6ac273b23c6b82da3bb25f4136c4fd42665f17f2cd850771cb600bdd2ebeda"}, - {file = "mypy-1.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9fd50226364cd2737351c79807775136b0abe084433b55b2e29181a4c3c878c0"}, - {file = "mypy-1.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f90cff89eea89273727d8783fef5d4a934be2fdca11b47def50cf5d311aff727"}, - {file = "mypy-1.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:fcfc70599efde5c67862a07a1aaf50e55bce629ace26bb19dc17cece5dd31ca4"}, - {file = "mypy-1.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:075cbf81f3e134eadaf247de187bd604748171d6b79736fa9b6c9685b4083061"}, - {file = "mypy-1.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:3f298531bca95ff615b6e9f2fc0333aae27fa48052903a0ac90215021cdcfa4f"}, - {file = "mypy-1.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fa7ef5244615a2523b56c034becde4e9e3f9b034854c93639adb667ec9ec2976"}, - {file = "mypy-1.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3236a4c8f535a0631f85f5fcdffba71c7feeef76a6002fcba7c1a8e57c8be1ec"}, - {file = "mypy-1.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a2b5cdbb5dd35aa08ea9114436e0d79aceb2f38e32c21684dcf8e24e1e92821"}, - {file = "mypy-1.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92f93b21c0fe73dc00abf91022234c79d793318b8a96faac147cd579c1671746"}, - {file = "mypy-1.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:28d0e038361b45f099cc086d9dd99c15ff14d0188f44ac883010e172ce86c38a"}, - {file = "mypy-1.10.0-py3-none-any.whl", hash = "sha256:f8c083976eb530019175aabadb60921e73b4f45736760826aa1689dda8208aee"}, - {file = "mypy-1.10.0.tar.gz", hash = "sha256:3d087fcbec056c4ee34974da493a826ce316947485cef3901f511848e687c131"}, + {file = "mypy-1.11.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a32fc80b63de4b5b3e65f4be82b4cfa362a46702672aa6a0f443b4689af7008c"}, + {file = "mypy-1.11.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c1952f5ea8a5a959b05ed5f16452fddadbaae48b5d39235ab4c3fc444d5fd411"}, + {file = "mypy-1.11.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1e30dc3bfa4e157e53c1d17a0dad20f89dc433393e7702b813c10e200843b03"}, + {file = "mypy-1.11.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2c63350af88f43a66d3dfeeeb8d77af34a4f07d760b9eb3a8697f0386c7590b4"}, + {file = "mypy-1.11.1-cp310-cp310-win_amd64.whl", hash = "sha256:a831671bad47186603872a3abc19634f3011d7f83b083762c942442d51c58d58"}, + {file = "mypy-1.11.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7b6343d338390bb946d449677726edf60102a1c96079b4f002dedff375953fc5"}, + {file = "mypy-1.11.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e4fe9f4e5e521b458d8feb52547f4bade7ef8c93238dfb5bbc790d9ff2d770ca"}, + {file = "mypy-1.11.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:886c9dbecc87b9516eff294541bf7f3655722bf22bb898ee06985cd7269898de"}, + {file = "mypy-1.11.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fca4a60e1dd9fd0193ae0067eaeeb962f2d79e0d9f0f66223a0682f26ffcc809"}, + {file = "mypy-1.11.1-cp311-cp311-win_amd64.whl", hash = "sha256:0bd53faf56de9643336aeea1c925012837432b5faf1701ccca7fde70166ccf72"}, + {file = "mypy-1.11.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f39918a50f74dc5969807dcfaecafa804fa7f90c9d60506835036cc1bc891dc8"}, + {file = "mypy-1.11.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0bc71d1fb27a428139dd78621953effe0d208aed9857cb08d002280b0422003a"}, + {file = "mypy-1.11.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b868d3bcff720dd7217c383474008ddabaf048fad8d78ed948bb4b624870a417"}, + {file = "mypy-1.11.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a707ec1527ffcdd1c784d0924bf5cb15cd7f22683b919668a04d2b9c34549d2e"}, + {file = "mypy-1.11.1-cp312-cp312-win_amd64.whl", hash = 
"sha256:64f4a90e3ea07f590c5bcf9029035cf0efeae5ba8be511a8caada1a4893f5525"}, + {file = "mypy-1.11.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:749fd3213916f1751fff995fccf20c6195cae941dc968f3aaadf9bb4e430e5a2"}, + {file = "mypy-1.11.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b639dce63a0b19085213ec5fdd8cffd1d81988f47a2dec7100e93564f3e8fb3b"}, + {file = "mypy-1.11.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c956b49c5d865394d62941b109728c5c596a415e9c5b2be663dd26a1ff07bc0"}, + {file = "mypy-1.11.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45df906e8b6804ef4b666af29a87ad9f5921aad091c79cc38e12198e220beabd"}, + {file = "mypy-1.11.1-cp38-cp38-win_amd64.whl", hash = "sha256:d44be7551689d9d47b7abc27c71257adfdb53f03880841a5db15ddb22dc63edb"}, + {file = "mypy-1.11.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2684d3f693073ab89d76da8e3921883019ea8a3ec20fa5d8ecca6a2db4c54bbe"}, + {file = "mypy-1.11.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:79c07eb282cb457473add5052b63925e5cc97dfab9812ee65a7c7ab5e3cb551c"}, + {file = "mypy-1.11.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11965c2f571ded6239977b14deebd3f4c3abd9a92398712d6da3a772974fad69"}, + {file = "mypy-1.11.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a2b43895a0f8154df6519706d9bca8280cda52d3d9d1514b2d9c3e26792a0b74"}, + {file = "mypy-1.11.1-cp39-cp39-win_amd64.whl", hash = "sha256:1a81cf05975fd61aec5ae16501a091cfb9f605dc3e3c878c0da32f250b74760b"}, + {file = "mypy-1.11.1-py3-none-any.whl", hash = "sha256:0624bdb940255d2dd24e829d99a13cfeb72e4e9031f9492148f410ed30bcab54"}, + {file = "mypy-1.11.1.tar.gz", hash = "sha256:f404a0b069709f18bbdb702eb3dcfe51910602995de00bd39cea3050b5772d08"}, ] [package.dependencies] mypy-extensions = ">=1.0.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=4.1.0" +typing-extensions = ">=4.6.0" [package.extras] dmypy = ["psutil (>=4.0)"] @@ -1917,36 +1248,36 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] [[package]] name = "onnxruntime" -version = "1.17.3" +version = "1.19.0" description = "ONNX Runtime is a runtime accelerator for Machine Learning models" optional = false python-versions = "*" files = [ - {file = "onnxruntime-1.17.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:d86dde9c0bb435d709e51bd25991c9fe5b9a5b168df45ce119769edc4d198b15"}, - {file = "onnxruntime-1.17.3-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9d87b68bf931ac527b2d3c094ead66bb4381bac4298b65f46c54fe4d1e255865"}, - {file = "onnxruntime-1.17.3-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:26e950cf0333cf114a155f9142e71da344d2b08dfe202763a403ae81cc02ebd1"}, - {file = "onnxruntime-1.17.3-cp310-cp310-win32.whl", hash = "sha256:0962a4d0f5acebf62e1f0bf69b6e0adf16649115d8de854c1460e79972324d68"}, - {file = "onnxruntime-1.17.3-cp310-cp310-win_amd64.whl", hash = "sha256:468ccb8a0faa25c681a41787b1594bf4448b0252d3efc8b62fd8b2411754340f"}, - {file = "onnxruntime-1.17.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e8cd90c1c17d13d47b89ab076471e07fb85467c01dcd87a8b8b5cdfbcb40aa51"}, - {file = "onnxruntime-1.17.3-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a058b39801baefe454eeb8acf3ada298c55a06a4896fafc224c02d79e9037f60"}, - {file = "onnxruntime-1.17.3-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:2f823d5eb4807007f3da7b27ca972263df6a1836e6f327384eb266274c53d05d"}, - {file = "onnxruntime-1.17.3-cp311-cp311-win32.whl", hash = "sha256:b66b23f9109e78ff2791628627a26f65cd335dcc5fbd67ff60162733a2f7aded"}, - {file = "onnxruntime-1.17.3-cp311-cp311-win_amd64.whl", hash = "sha256:570760ca53a74cdd751ee49f13de70d1384dcf73d9888b8deac0917023ccda6d"}, - {file = "onnxruntime-1.17.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:77c318178d9c16e9beadd9a4070d8aaa9f57382c3f509b01709f0f010e583b99"}, - {file = "onnxruntime-1.17.3-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:23da8469049b9759082e22c41a444f44a520a9c874b084711b6343672879f50b"}, - {file = "onnxruntime-1.17.3-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2949730215af3f9289008b2e31e9bbef952012a77035b911c4977edea06f3f9e"}, - {file = "onnxruntime-1.17.3-cp312-cp312-win32.whl", hash = "sha256:6c7555a49008f403fb3b19204671efb94187c5085976ae526cb625f6ede317bc"}, - {file = "onnxruntime-1.17.3-cp312-cp312-win_amd64.whl", hash = "sha256:58672cf20293a1b8a277a5c6c55383359fcdf6119b2f14df6ce3b140f5001c39"}, - {file = "onnxruntime-1.17.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:4395ba86e3c1e93c794a00619ef1aec597ab78f5a5039f3c6d2e9d0695c0a734"}, - {file = "onnxruntime-1.17.3-cp38-cp38-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bdf354c04344ec38564fc22394e1fe08aa6d70d790df00159205a0055c4a4d3f"}, - {file = "onnxruntime-1.17.3-cp38-cp38-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a94b600b7af50e922d44b95a57981e3e35103c6e3693241a03d3ca204740bbda"}, - {file = "onnxruntime-1.17.3-cp38-cp38-win32.whl", hash = "sha256:5a335c76f9c002a8586c7f38bc20fe4b3725ced21f8ead835c3e4e507e42b2ab"}, - {file = "onnxruntime-1.17.3-cp38-cp38-win_amd64.whl", hash = "sha256:8f56a86fbd0ddc8f22696ddeda0677b041381f4168a2ca06f712ef6ec6050d6d"}, - {file = "onnxruntime-1.17.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:e0ae39f5452278cd349520c296e7de3e90d62dc5b0157c6868e2748d7f28b871"}, - {file = "onnxruntime-1.17.3-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3ff2dc012bd930578aff5232afd2905bf16620815f36783a941aafabf94b3702"}, - {file = "onnxruntime-1.17.3-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cf6c37483782e4785019b56e26224a25e9b9a35b849d0169ce69189867a22bb1"}, - {file = "onnxruntime-1.17.3-cp39-cp39-win32.whl", hash = "sha256:351bf5a1140dcc43bfb8d3d1a230928ee61fcd54b0ea664c8e9a889a8e3aa515"}, - {file = "onnxruntime-1.17.3-cp39-cp39-win_amd64.whl", hash = "sha256:57a3de15778da8d6cc43fbf6cf038e1e746146300b5f0b1fbf01f6f795dc6440"}, + {file = "onnxruntime-1.19.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:6ce22a98dfec7b646ae305f52d0ce14a189a758b02ea501860ca719f4b0ae04b"}, + {file = "onnxruntime-1.19.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:19019c72873f26927aa322c54cf2bf7312b23451b27451f39b88f57016c94f8b"}, + {file = "onnxruntime-1.19.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8eaa16df99171dc636e30108d15597aed8c4c2dd9dbfdd07cc464d57d73fb275"}, + {file = "onnxruntime-1.19.0-cp310-cp310-win32.whl", hash = "sha256:0eb0f8dbe596fd0f4737fe511fdbb17603853a7d204c5b2ca38d3c7808fc556b"}, + {file = "onnxruntime-1.19.0-cp310-cp310-win_amd64.whl", hash = "sha256:616092d54ba8023b7bc0a5f6d900a07a37cc1cfcc631873c15f8c1d6e9e184d4"}, + {file = "onnxruntime-1.19.0-cp311-cp311-macosx_11_0_universal2.whl", hash = 
"sha256:a2b53b3c287cd933e5eb597273926e899082d8c84ab96e1b34035764a1627e17"}, + {file = "onnxruntime-1.19.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e94984663963e74fbb468bde9ec6f19dcf890b594b35e249c4dc8789d08993c5"}, + {file = "onnxruntime-1.19.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6f379d1f050cfb55ce015d53727b78ee362febc065c38eed81512b22b757da73"}, + {file = "onnxruntime-1.19.0-cp311-cp311-win32.whl", hash = "sha256:4ccb48faea02503275ae7e79e351434fc43c294c4cb5c4d8bcb7479061396614"}, + {file = "onnxruntime-1.19.0-cp311-cp311-win_amd64.whl", hash = "sha256:9cdc8d311289a84e77722de68bd22b8adfb94eea26f4be6f9e017350faac8b18"}, + {file = "onnxruntime-1.19.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:1b59eaec1be9a8613c5fdeaafe67f73a062edce3ac03bbbdc9e2d98b58a30617"}, + {file = "onnxruntime-1.19.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be4144d014a4b25184e63ce7a463a2e7796e2f3df931fccc6a6aefa6f1365dc5"}, + {file = "onnxruntime-1.19.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:10d7e7d4ca7021ce7f29a66dbc6071addf2de5839135339bd855c6d9c2bba371"}, + {file = "onnxruntime-1.19.0-cp312-cp312-win32.whl", hash = "sha256:87f2c58b577a1fb31dc5d92b647ecc588fd5f1ea0c3ad4526f5f80a113357c8d"}, + {file = "onnxruntime-1.19.0-cp312-cp312-win_amd64.whl", hash = "sha256:8a1f50d49676d7b69566536ff039d9e4e95fc482a55673719f46528218ecbb94"}, + {file = "onnxruntime-1.19.0-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:71423c8c4b2d7a58956271534302ec72721c62a41efd0c4896343249b8399ab0"}, + {file = "onnxruntime-1.19.0-cp38-cp38-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9d63630d45e9498f96e75bbeb7fd4a56acb10155de0de4d0e18d1b6cbb0b358a"}, + {file = "onnxruntime-1.19.0-cp38-cp38-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f3bfd15db1e8794d379a86c1a9116889f47f2cca40cc82208fc4f7e8c38e8522"}, + {file = "onnxruntime-1.19.0-cp38-cp38-win32.whl", hash = "sha256:3b098003b6b4cb37cc84942e5f1fe27f945dd857cbd2829c824c26b0ba4a247e"}, + {file = "onnxruntime-1.19.0-cp38-cp38-win_amd64.whl", hash = "sha256:cea067a6541d6787d903ee6843401c5b1332a266585160d9700f9f0939443886"}, + {file = "onnxruntime-1.19.0-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:c4fcff12dc5ca963c5f76b9822bb404578fa4a98c281e8c666b429192799a099"}, + {file = "onnxruntime-1.19.0-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f6dcad8a4db908fbe70b98c79cea1c8b6ac3316adf4ce93453136e33a524ac59"}, + {file = "onnxruntime-1.19.0-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4bc449907c6e8d99eee5ae5cc9c8fdef273d801dcd195393d3f9ab8ad3f49522"}, + {file = "onnxruntime-1.19.0-cp39-cp39-win32.whl", hash = "sha256:947febd48405afcf526e45ccff97ff23b15e530434705f734870d22ae7fcf236"}, + {file = "onnxruntime-1.19.0-cp39-cp39-win_amd64.whl", hash = "sha256:f60be47eff5ee77fd28a466b0fd41d7debc42a32179d1ddb21e05d6067d7b48b"}, ] [package.dependencies] @@ -1957,67 +1288,44 @@ packaging = "*" protobuf = "*" sympy = "*" -[[package]] -name = "openai" -version = "1.35.7" -description = "The official Python library for the openai API" -optional = false -python-versions = ">=3.7.1" -files = [ - {file = "openai-1.35.7-py3-none-any.whl", hash = "sha256:3d1e0b0aac9b0db69a972d36dc7efa7563f8e8d65550b27a48f2a0c2ec207e80"}, - {file = "openai-1.35.7.tar.gz", hash = "sha256:009bfa1504c9c7ef64d87be55936d142325656bbc6d98c68b669d6472e4beb09"}, -] - 
-[package.dependencies] -anyio = ">=3.5.0,<5" -distro = ">=1.7.0,<2" -httpx = ">=0.23.0,<1" -pydantic = ">=1.9.0,<3" -sniffio = "*" -tqdm = ">4" -typing-extensions = ">=4.7,<5" - -[package.extras] -datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] - [[package]] name = "opentelemetry-api" -version = "1.24.0" +version = "1.26.0" description = "OpenTelemetry Python API" optional = false python-versions = ">=3.8" files = [ - {file = "opentelemetry_api-1.24.0-py3-none-any.whl", hash = "sha256:0f2c363d98d10d1ce93330015ca7fd3a65f60be64e05e30f557c61de52c80ca2"}, - {file = "opentelemetry_api-1.24.0.tar.gz", hash = "sha256:42719f10ce7b5a9a73b10a4baf620574fb8ad495a9cbe5c18d76b75d8689c67e"}, + {file = "opentelemetry_api-1.26.0-py3-none-any.whl", hash = "sha256:7d7ea33adf2ceda2dd680b18b1677e4152000b37ca76e679da71ff103b943064"}, + {file = "opentelemetry_api-1.26.0.tar.gz", hash = "sha256:2bd639e4bed5b18486fef0b5a520aaffde5a18fc225e808a1ac4df363f43a1ce"}, ] [package.dependencies] deprecated = ">=1.2.6" -importlib-metadata = ">=6.0,<=7.0" +importlib-metadata = ">=6.0,<=8.0.0" [[package]] name = "opentelemetry-exporter-otlp-proto-common" -version = "1.24.0" +version = "1.26.0" description = "OpenTelemetry Protobuf encoding" optional = false python-versions = ">=3.8" files = [ - {file = "opentelemetry_exporter_otlp_proto_common-1.24.0-py3-none-any.whl", hash = "sha256:e51f2c9735054d598ad2df5d3eca830fecfb5b0bda0a2fa742c9c7718e12f641"}, - {file = "opentelemetry_exporter_otlp_proto_common-1.24.0.tar.gz", hash = "sha256:5d31fa1ff976cacc38be1ec4e3279a3f88435c75b38b1f7a099a1faffc302461"}, + {file = "opentelemetry_exporter_otlp_proto_common-1.26.0-py3-none-any.whl", hash = "sha256:ee4d8f8891a1b9c372abf8d109409e5b81947cf66423fd998e56880057afbc71"}, + {file = "opentelemetry_exporter_otlp_proto_common-1.26.0.tar.gz", hash = "sha256:bdbe50e2e22a1c71acaa0c8ba6efaadd58882e5a5978737a44a4c4b10d304c92"}, ] [package.dependencies] -opentelemetry-proto = "1.24.0" +opentelemetry-proto = "1.26.0" [[package]] name = "opentelemetry-exporter-otlp-proto-grpc" -version = "1.24.0" +version = "1.26.0" description = "OpenTelemetry Collector Protobuf over gRPC Exporter" optional = false python-versions = ">=3.8" files = [ - {file = "opentelemetry_exporter_otlp_proto_grpc-1.24.0-py3-none-any.whl", hash = "sha256:f40d62aa30a0a43cc1657428e59fcf82ad5f7ea8fff75de0f9d9cb6f739e0a3b"}, - {file = "opentelemetry_exporter_otlp_proto_grpc-1.24.0.tar.gz", hash = "sha256:217c6e30634f2c9797999ea9da29f7300479a94a610139b9df17433f915e7baa"}, + {file = "opentelemetry_exporter_otlp_proto_grpc-1.26.0-py3-none-any.whl", hash = "sha256:e2be5eff72ebcb010675b818e8d7c2e7d61ec451755b8de67a140bc49b9b0280"}, + {file = "opentelemetry_exporter_otlp_proto_grpc-1.26.0.tar.gz", hash = "sha256:a65b67a9a6b06ba1ec406114568e21afe88c1cdb29c464f2507d529eb906d8ae"}, ] [package.dependencies] @@ -2025,22 +1333,19 @@ deprecated = ">=1.2.6" googleapis-common-protos = ">=1.52,<2.0" grpcio = ">=1.0.0,<2.0.0" opentelemetry-api = ">=1.15,<2.0" -opentelemetry-exporter-otlp-proto-common = "1.24.0" -opentelemetry-proto = "1.24.0" -opentelemetry-sdk = ">=1.24.0,<1.25.0" - -[package.extras] -test = ["pytest-grpc"] +opentelemetry-exporter-otlp-proto-common = "1.26.0" +opentelemetry-proto = "1.26.0" +opentelemetry-sdk = ">=1.26.0,<1.27.0" [[package]] name = "opentelemetry-instrumentation" -version = "0.45b0" +version = "0.47b0" description = "Instrumentation Tools & Auto Instrumentation for OpenTelemetry Python" optional = false python-versions = ">=3.8" files = [ - 
{file = "opentelemetry_instrumentation-0.45b0-py3-none-any.whl", hash = "sha256:06c02e2c952c1b076e8eaedf1b82f715e2937ba7eeacab55913dd434fbcec258"}, - {file = "opentelemetry_instrumentation-0.45b0.tar.gz", hash = "sha256:6c47120a7970bbeb458e6a73686ee9ba84b106329a79e4a4a66761f933709c7e"}, + {file = "opentelemetry_instrumentation-0.47b0-py3-none-any.whl", hash = "sha256:88974ee52b1db08fc298334b51c19d47e53099c33740e48c4f084bd1afd052d5"}, + {file = "opentelemetry_instrumentation-0.47b0.tar.gz", hash = "sha256:96f9885e450c35e3f16a4f33145f2ebf620aea910c9fd74a392bbc0f807a350f"}, ] [package.dependencies] @@ -2050,55 +1355,55 @@ wrapt = ">=1.0.0,<2.0.0" [[package]] name = "opentelemetry-instrumentation-asgi" -version = "0.45b0" +version = "0.47b0" description = "ASGI instrumentation for OpenTelemetry" optional = false python-versions = ">=3.8" files = [ - {file = "opentelemetry_instrumentation_asgi-0.45b0-py3-none-any.whl", hash = "sha256:8be1157ed62f0db24e45fdf7933c530c4338bd025c5d4af7830e903c0756021b"}, - {file = "opentelemetry_instrumentation_asgi-0.45b0.tar.gz", hash = "sha256:97f55620f163fd3d20323e9fd8dc3aacc826c03397213ff36b877e0f4b6b08a6"}, + {file = "opentelemetry_instrumentation_asgi-0.47b0-py3-none-any.whl", hash = "sha256:b798dc4957b3edc9dfecb47a4c05809036a4b762234c5071212fda39ead80ade"}, + {file = "opentelemetry_instrumentation_asgi-0.47b0.tar.gz", hash = "sha256:e78b7822c1bca0511e5e9610ec484b8994a81670375e570c76f06f69af7c506a"}, ] [package.dependencies] asgiref = ">=3.0,<4.0" opentelemetry-api = ">=1.12,<2.0" -opentelemetry-instrumentation = "0.45b0" -opentelemetry-semantic-conventions = "0.45b0" -opentelemetry-util-http = "0.45b0" +opentelemetry-instrumentation = "0.47b0" +opentelemetry-semantic-conventions = "0.47b0" +opentelemetry-util-http = "0.47b0" [package.extras] instruments = ["asgiref (>=3.0,<4.0)"] [[package]] name = "opentelemetry-instrumentation-fastapi" -version = "0.45b0" +version = "0.47b0" description = "OpenTelemetry FastAPI Instrumentation" optional = false python-versions = ">=3.8" files = [ - {file = "opentelemetry_instrumentation_fastapi-0.45b0-py3-none-any.whl", hash = "sha256:77d9c123a363129148f5f66d44094f3d67aaaa2b201396d94782b4a7f9ce4314"}, - {file = "opentelemetry_instrumentation_fastapi-0.45b0.tar.gz", hash = "sha256:5a6b91e1c08a01601845fcfcfdefd0a2aecdb3c356d4a436a3210cb58c21487e"}, + {file = "opentelemetry_instrumentation_fastapi-0.47b0-py3-none-any.whl", hash = "sha256:5ac28dd401160b02e4f544a85a9e4f61a8cbe5b077ea0379d411615376a2bd21"}, + {file = "opentelemetry_instrumentation_fastapi-0.47b0.tar.gz", hash = "sha256:0c7c10b5d971e99a420678ffd16c5b1ea4f0db3b31b62faf305fbb03b4ebee36"}, ] [package.dependencies] opentelemetry-api = ">=1.12,<2.0" -opentelemetry-instrumentation = "0.45b0" -opentelemetry-instrumentation-asgi = "0.45b0" -opentelemetry-semantic-conventions = "0.45b0" -opentelemetry-util-http = "0.45b0" +opentelemetry-instrumentation = "0.47b0" +opentelemetry-instrumentation-asgi = "0.47b0" +opentelemetry-semantic-conventions = "0.47b0" +opentelemetry-util-http = "0.47b0" [package.extras] -instruments = ["fastapi (>=0.58,<1.0)"] +instruments = ["fastapi (>=0.58,<1.0)", "fastapi-slim (>=0.111.0,<0.112.0)"] [[package]] name = "opentelemetry-proto" -version = "1.24.0" +version = "1.26.0" description = "OpenTelemetry Python Proto" optional = false python-versions = ">=3.8" files = [ - {file = "opentelemetry_proto-1.24.0-py3-none-any.whl", hash = "sha256:bcb80e1e78a003040db71ccf83f2ad2019273d1e0828089d183b18a1476527ce"}, - {file = 
"opentelemetry_proto-1.24.0.tar.gz", hash = "sha256:ff551b8ad63c6cabb1845ce217a6709358dfaba0f75ea1fa21a61ceddc78cab8"}, + {file = "opentelemetry_proto-1.26.0-py3-none-any.whl", hash = "sha256:6c4d7b4d4d9c88543bcf8c28ae3f8f0448a753dc291c18c5390444c90b76a725"}, + {file = "opentelemetry_proto-1.26.0.tar.gz", hash = "sha256:c5c18796c0cab3751fc3b98dee53855835e90c0422924b484432ac852d93dc1e"}, ] [package.dependencies] @@ -2106,95 +1411,110 @@ protobuf = ">=3.19,<5.0" [[package]] name = "opentelemetry-sdk" -version = "1.24.0" +version = "1.26.0" description = "OpenTelemetry Python SDK" optional = false python-versions = ">=3.8" files = [ - {file = "opentelemetry_sdk-1.24.0-py3-none-any.whl", hash = "sha256:fa731e24efe832e98bcd90902085b359dcfef7d9c9c00eb5b9a18587dae3eb59"}, - {file = "opentelemetry_sdk-1.24.0.tar.gz", hash = "sha256:75bc0563affffa827700e0f4f4a68e1e257db0df13372344aebc6f8a64cde2e5"}, + {file = "opentelemetry_sdk-1.26.0-py3-none-any.whl", hash = "sha256:feb5056a84a88670c041ea0ded9921fca559efec03905dddeb3885525e0af897"}, + {file = "opentelemetry_sdk-1.26.0.tar.gz", hash = "sha256:c90d2868f8805619535c05562d699e2f4fb1f00dbd55a86dcefca4da6fa02f85"}, ] [package.dependencies] -opentelemetry-api = "1.24.0" -opentelemetry-semantic-conventions = "0.45b0" +opentelemetry-api = "1.26.0" +opentelemetry-semantic-conventions = "0.47b0" typing-extensions = ">=3.7.4" [[package]] name = "opentelemetry-semantic-conventions" -version = "0.45b0" +version = "0.47b0" description = "OpenTelemetry Semantic Conventions" optional = false python-versions = ">=3.8" files = [ - {file = "opentelemetry_semantic_conventions-0.45b0-py3-none-any.whl", hash = "sha256:a4a6fb9a7bacd9167c082aa4681009e9acdbfa28ffb2387af50c2fef3d30c864"}, - {file = "opentelemetry_semantic_conventions-0.45b0.tar.gz", hash = "sha256:7c84215a44ac846bc4b8e32d5e78935c5c43482e491812a0bb8aaf87e4d92118"}, + {file = "opentelemetry_semantic_conventions-0.47b0-py3-none-any.whl", hash = "sha256:4ff9d595b85a59c1c1413f02bba320ce7ea6bf9e2ead2b0913c4395c7bbc1063"}, + {file = "opentelemetry_semantic_conventions-0.47b0.tar.gz", hash = "sha256:a8d57999bbe3495ffd4d510de26a97dadc1dace53e0275001b2c1b2f67992a7e"}, ] +[package.dependencies] +deprecated = ">=1.2.6" +opentelemetry-api = "1.26.0" + [[package]] name = "opentelemetry-util-http" -version = "0.45b0" +version = "0.47b0" description = "Web util for OpenTelemetry" optional = false python-versions = ">=3.8" files = [ - {file = "opentelemetry_util_http-0.45b0-py3-none-any.whl", hash = "sha256:6628868b501b3004e1860f976f410eeb3d3499e009719d818000f24ce17b6e33"}, - {file = "opentelemetry_util_http-0.45b0.tar.gz", hash = "sha256:4ce08b6a7d52dd7c96b7705b5b4f06fdb6aa3eac1233b3b0bfef8a0cab9a92cd"}, + {file = "opentelemetry_util_http-0.47b0-py3-none-any.whl", hash = "sha256:3d3215e09c4a723b12da6d0233a31395aeb2bb33a64d7b15a1500690ba250f19"}, + {file = "opentelemetry_util_http-0.47b0.tar.gz", hash = "sha256:352a07664c18eef827eb8ddcbd64c64a7284a39dd1655e2f16f577eb046ccb32"}, ] [[package]] name = "orjson" -version = "3.10.3" +version = "3.10.7" description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" optional = false python-versions = ">=3.8" files = [ - {file = "orjson-3.10.3-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9fb6c3f9f5490a3eb4ddd46fc1b6eadb0d6fc16fb3f07320149c3286a1409dd8"}, - {file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:252124b198662eee80428f1af8c63f7ff077c88723fe206a25df8dc57a57b1fa"}, - {file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9f3e87733823089a338ef9bbf363ef4de45e5c599a9bf50a7a9b82e86d0228da"}, - {file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8334c0d87103bb9fbbe59b78129f1f40d1d1e8355bbed2ca71853af15fa4ed3"}, - {file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1952c03439e4dce23482ac846e7961f9d4ec62086eb98ae76d97bd41d72644d7"}, - {file = "orjson-3.10.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c0403ed9c706dcd2809f1600ed18f4aae50be263bd7112e54b50e2c2bc3ebd6d"}, - {file = "orjson-3.10.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:382e52aa4270a037d41f325e7d1dfa395b7de0c367800b6f337d8157367bf3a7"}, - {file = "orjson-3.10.3-cp310-none-win32.whl", hash = "sha256:be2aab54313752c04f2cbaab4515291ef5af8c2256ce22abc007f89f42f49109"}, - {file = "orjson-3.10.3-cp310-none-win_amd64.whl", hash = "sha256:416b195f78ae461601893f482287cee1e3059ec49b4f99479aedf22a20b1098b"}, - {file = "orjson-3.10.3-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:73100d9abbbe730331f2242c1fc0bcb46a3ea3b4ae3348847e5a141265479700"}, - {file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:544a12eee96e3ab828dbfcb4d5a0023aa971b27143a1d35dc214c176fdfb29b3"}, - {file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:520de5e2ef0b4ae546bea25129d6c7c74edb43fc6cf5213f511a927f2b28148b"}, - {file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ccaa0a401fc02e8828a5bedfd80f8cd389d24f65e5ca3954d72c6582495b4bcf"}, - {file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a7bc9e8bc11bac40f905640acd41cbeaa87209e7e1f57ade386da658092dc16"}, - {file = "orjson-3.10.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3582b34b70543a1ed6944aca75e219e1192661a63da4d039d088a09c67543b08"}, - {file = "orjson-3.10.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1c23dfa91481de880890d17aa7b91d586a4746a4c2aa9a145bebdbaf233768d5"}, - {file = "orjson-3.10.3-cp311-none-win32.whl", hash = "sha256:1770e2a0eae728b050705206d84eda8b074b65ee835e7f85c919f5705b006c9b"}, - {file = "orjson-3.10.3-cp311-none-win_amd64.whl", hash = "sha256:93433b3c1f852660eb5abdc1f4dd0ced2be031ba30900433223b28ee0140cde5"}, - {file = "orjson-3.10.3-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a39aa73e53bec8d410875683bfa3a8edf61e5a1c7bb4014f65f81d36467ea098"}, - {file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0943a96b3fa09bee1afdfccc2cb236c9c64715afa375b2af296c73d91c23eab2"}, - {file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e852baafceff8da3c9defae29414cc8513a1586ad93e45f27b89a639c68e8176"}, - {file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18566beb5acd76f3769c1d1a7ec06cdb81edc4d55d2765fb677e3eaa10fa99e0"}, - {file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bd2218d5a3aa43060efe649ec564ebedec8ce6ae0a43654b81376216d5ebd42"}, - {file = "orjson-3.10.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:cf20465e74c6e17a104ecf01bf8cd3b7b252565b4ccee4548f18b012ff2f8069"}, - {file = "orjson-3.10.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ba7f67aa7f983c4345eeda16054a4677289011a478ca947cd69c0a86ea45e534"}, - {file = "orjson-3.10.3-cp312-none-win32.whl", hash = "sha256:17e0713fc159abc261eea0f4feda611d32eabc35708b74bef6ad44f6c78d5ea0"}, - {file = "orjson-3.10.3-cp312-none-win_amd64.whl", hash = "sha256:4c895383b1ec42b017dd2c75ae8a5b862fc489006afde06f14afbdd0309b2af0"}, - {file = "orjson-3.10.3-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:be2719e5041e9fb76c8c2c06b9600fe8e8584e6980061ff88dcbc2691a16d20d"}, - {file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0175a5798bdc878956099f5c54b9837cb62cfbf5d0b86ba6d77e43861bcec2"}, - {file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:978be58a68ade24f1af7758626806e13cff7748a677faf95fbb298359aa1e20d"}, - {file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16bda83b5c61586f6f788333d3cf3ed19015e3b9019188c56983b5a299210eb5"}, - {file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ad1f26bea425041e0a1adad34630c4825a9e3adec49079b1fb6ac8d36f8b754"}, - {file = "orjson-3.10.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:9e253498bee561fe85d6325ba55ff2ff08fb5e7184cd6a4d7754133bd19c9195"}, - {file = "orjson-3.10.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0a62f9968bab8a676a164263e485f30a0b748255ee2f4ae49a0224be95f4532b"}, - {file = "orjson-3.10.3-cp38-none-win32.whl", hash = "sha256:8d0b84403d287d4bfa9bf7d1dc298d5c1c5d9f444f3737929a66f2fe4fb8f134"}, - {file = "orjson-3.10.3-cp38-none-win_amd64.whl", hash = "sha256:8bc7a4df90da5d535e18157220d7915780d07198b54f4de0110eca6b6c11e290"}, - {file = "orjson-3.10.3-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9059d15c30e675a58fdcd6f95465c1522b8426e092de9fff20edebfdc15e1cb0"}, - {file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d40c7f7938c9c2b934b297412c067936d0b54e4b8ab916fd1a9eb8f54c02294"}, - {file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d4a654ec1de8fdaae1d80d55cee65893cb06494e124681ab335218be6a0691e7"}, - {file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:831c6ef73f9aa53c5f40ae8f949ff7681b38eaddb6904aab89dca4d85099cb78"}, - {file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99b880d7e34542db89f48d14ddecbd26f06838b12427d5a25d71baceb5ba119d"}, - {file = "orjson-3.10.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2e5e176c994ce4bd434d7aafb9ecc893c15f347d3d2bbd8e7ce0b63071c52e25"}, - {file = "orjson-3.10.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b69a58a37dab856491bf2d3bbf259775fdce262b727f96aafbda359cb1d114d8"}, - {file = "orjson-3.10.3-cp39-none-win32.whl", hash = "sha256:b8d4d1a6868cde356f1402c8faeb50d62cee765a1f7ffcfd6de732ab0581e063"}, - {file = "orjson-3.10.3-cp39-none-win_amd64.whl", hash = "sha256:5102f50c5fc46d94f2033fe00d392588564378260d64377aec702f21a7a22912"}, - {file = "orjson-3.10.3.tar.gz", hash = "sha256:2b166507acae7ba2f7c315dcf185a9111ad5e992ac81f2d507aac39193c2c818"}, + {file = "orjson-3.10.7-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = 
"sha256:74f4544f5a6405b90da8ea724d15ac9c36da4d72a738c64685003337401f5c12"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34a566f22c28222b08875b18b0dfbf8a947e69df21a9ed5c51a6bf91cfb944ac"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bf6ba8ebc8ef5792e2337fb0419f8009729335bb400ece005606336b7fd7bab7"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac7cf6222b29fbda9e3a472b41e6a5538b48f2c8f99261eecd60aafbdb60690c"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de817e2f5fc75a9e7dd350c4b0f54617b280e26d1631811a43e7e968fa71e3e9"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:348bdd16b32556cf8d7257b17cf2bdb7ab7976af4af41ebe79f9796c218f7e91"}, + {file = "orjson-3.10.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:479fd0844ddc3ca77e0fd99644c7fe2de8e8be1efcd57705b5c92e5186e8a250"}, + {file = "orjson-3.10.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fdf5197a21dd660cf19dfd2a3ce79574588f8f5e2dbf21bda9ee2d2b46924d84"}, + {file = "orjson-3.10.7-cp310-none-win32.whl", hash = "sha256:d374d36726746c81a49f3ff8daa2898dccab6596864ebe43d50733275c629175"}, + {file = "orjson-3.10.7-cp310-none-win_amd64.whl", hash = "sha256:cb61938aec8b0ffb6eef484d480188a1777e67b05d58e41b435c74b9d84e0b9c"}, + {file = "orjson-3.10.7-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:7db8539039698ddfb9a524b4dd19508256107568cdad24f3682d5773e60504a2"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:480f455222cb7a1dea35c57a67578848537d2602b46c464472c995297117fa09"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8a9c9b168b3a19e37fe2778c0003359f07822c90fdff8f98d9d2a91b3144d8e0"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8de062de550f63185e4c1c54151bdddfc5625e37daf0aa1e75d2a1293e3b7d9a"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6b0dd04483499d1de9c8f6203f8975caf17a6000b9c0c54630cef02e44ee624e"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b58d3795dafa334fc8fd46f7c5dc013e6ad06fd5b9a4cc98cb1456e7d3558bd6"}, + {file = "orjson-3.10.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:33cfb96c24034a878d83d1a9415799a73dc77480e6c40417e5dda0710d559ee6"}, + {file = "orjson-3.10.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e724cebe1fadc2b23c6f7415bad5ee6239e00a69f30ee423f319c6af70e2a5c0"}, + {file = "orjson-3.10.7-cp311-none-win32.whl", hash = "sha256:82763b46053727a7168d29c772ed5c870fdae2f61aa8a25994c7984a19b1021f"}, + {file = "orjson-3.10.7-cp311-none-win_amd64.whl", hash = "sha256:eb8d384a24778abf29afb8e41d68fdd9a156cf6e5390c04cc07bbc24b89e98b5"}, + {file = "orjson-3.10.7-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:44a96f2d4c3af51bfac6bc4ef7b182aa33f2f054fd7f34cc0ee9a320d051d41f"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76ac14cd57df0572453543f8f2575e2d01ae9e790c21f57627803f5e79b0d3c3"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:bdbb61dcc365dd9be94e8f7df91975edc9364d6a78c8f7adb69c1cdff318ec93"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b48b3db6bb6e0a08fa8c83b47bc169623f801e5cc4f24442ab2b6617da3b5313"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:23820a1563a1d386414fef15c249040042b8e5d07b40ab3fe3efbfbbcbcb8864"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0c6a008e91d10a2564edbb6ee5069a9e66df3fbe11c9a005cb411f441fd2c09"}, + {file = "orjson-3.10.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d352ee8ac1926d6193f602cbe36b1643bbd1bbcb25e3c1a657a4390f3000c9a5"}, + {file = "orjson-3.10.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d2d9f990623f15c0ae7ac608103c33dfe1486d2ed974ac3f40b693bad1a22a7b"}, + {file = "orjson-3.10.7-cp312-none-win32.whl", hash = "sha256:7c4c17f8157bd520cdb7195f75ddbd31671997cbe10aee559c2d613592e7d7eb"}, + {file = "orjson-3.10.7-cp312-none-win_amd64.whl", hash = "sha256:1d9c0e733e02ada3ed6098a10a8ee0052dd55774de3d9110d29868d24b17faa1"}, + {file = "orjson-3.10.7-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:77d325ed866876c0fa6492598ec01fe30e803272a6e8b10e992288b009cbe149"}, + {file = "orjson-3.10.7-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ea2c232deedcb605e853ae1db2cc94f7390ac776743b699b50b071b02bea6fe"}, + {file = "orjson-3.10.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3dcfbede6737fdbef3ce9c37af3fb6142e8e1ebc10336daa05872bfb1d87839c"}, + {file = "orjson-3.10.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:11748c135f281203f4ee695b7f80bb1358a82a63905f9f0b794769483ea854ad"}, + {file = "orjson-3.10.7-cp313-none-win32.whl", hash = "sha256:a7e19150d215c7a13f39eb787d84db274298d3f83d85463e61d277bbd7f401d2"}, + {file = "orjson-3.10.7-cp313-none-win_amd64.whl", hash = "sha256:eef44224729e9525d5261cc8d28d6b11cafc90e6bd0be2157bde69a52ec83024"}, + {file = "orjson-3.10.7-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:6ea2b2258eff652c82652d5e0f02bd5e0463a6a52abb78e49ac288827aaa1469"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:430ee4d85841e1483d487e7b81401785a5dfd69db5de01314538f31f8fbf7ee1"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4b6146e439af4c2472c56f8540d799a67a81226e11992008cb47e1267a9b3225"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:084e537806b458911137f76097e53ce7bf5806dda33ddf6aaa66a028f8d43a23"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4829cf2195838e3f93b70fd3b4292156fc5e097aac3739859ac0dcc722b27ac0"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1193b2416cbad1a769f868b1749535d5da47626ac29445803dae7cc64b3f5c98"}, + {file = "orjson-3.10.7-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:4e6c3da13e5a57e4b3dca2de059f243ebec705857522f188f0180ae88badd354"}, + {file = "orjson-3.10.7-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c31008598424dfbe52ce8c5b47e0752dca918a4fdc4a2a32004efd9fab41d866"}, + {file = "orjson-3.10.7-cp38-none-win32.whl", hash = "sha256:7122a99831f9e7fe977dc45784d3b2edc821c172d545e6420c375e5a935f5a1c"}, + {file = 
"orjson-3.10.7-cp38-none-win_amd64.whl", hash = "sha256:a763bc0e58504cc803739e7df040685816145a6f3c8a589787084b54ebc9f16e"}, + {file = "orjson-3.10.7-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:e76be12658a6fa376fcd331b1ea4e58f5a06fd0220653450f0d415b8fd0fbe20"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed350d6978d28b92939bfeb1a0570c523f6170efc3f0a0ef1f1df287cd4f4960"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:144888c76f8520e39bfa121b31fd637e18d4cc2f115727865fdf9fa325b10412"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09b2d92fd95ad2402188cf51573acde57eb269eddabaa60f69ea0d733e789fe9"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5b24a579123fa884f3a3caadaed7b75eb5715ee2b17ab5c66ac97d29b18fe57f"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591bcfe7512353bd609875ab38050efe3d55e18934e2f18950c108334b4ff"}, + {file = "orjson-3.10.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f4db56635b58cd1a200b0a23744ff44206ee6aa428185e2b6c4a65b3197abdcd"}, + {file = "orjson-3.10.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0fa5886854673222618638c6df7718ea7fe2f3f2384c452c9ccedc70b4a510a5"}, + {file = "orjson-3.10.7-cp39-none-win32.whl", hash = "sha256:8272527d08450ab16eb405f47e0f4ef0e5ff5981c3d82afe0efd25dcbef2bcd2"}, + {file = "orjson-3.10.7-cp39-none-win_amd64.whl", hash = "sha256:974683d4618c0c7dbf4f69c95a979734bf183d0658611760017f6e70a145af58"}, + {file = "orjson-3.10.7.tar.gz", hash = "sha256:75ef0640403f945f3a1f9f6400686560dbfb0fb5b16589ad62cd477043c4eee3"}, ] [[package]] @@ -2210,13 +1530,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.1" description = "Core utilities for Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, ] [[package]] @@ -2259,22 +1579,22 @@ test = ["coverage", "flake8", "freezegun (==0.3.15)", "mock (>=2.0.0)", "pylint" [[package]] name = "protobuf" -version = "4.25.3" +version = "4.25.4" description = "" optional = false python-versions = ">=3.8" files = [ - {file = "protobuf-4.25.3-cp310-abi3-win32.whl", hash = "sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa"}, - {file = "protobuf-4.25.3-cp310-abi3-win_amd64.whl", hash = "sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8"}, - {file = "protobuf-4.25.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c"}, - {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019"}, - {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d"}, - {file = 
"protobuf-4.25.3-cp38-cp38-win32.whl", hash = "sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2"}, - {file = "protobuf-4.25.3-cp38-cp38-win_amd64.whl", hash = "sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4"}, - {file = "protobuf-4.25.3-cp39-cp39-win32.whl", hash = "sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4"}, - {file = "protobuf-4.25.3-cp39-cp39-win_amd64.whl", hash = "sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c"}, - {file = "protobuf-4.25.3-py3-none-any.whl", hash = "sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9"}, - {file = "protobuf-4.25.3.tar.gz", hash = "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c"}, + {file = "protobuf-4.25.4-cp310-abi3-win32.whl", hash = "sha256:db9fd45183e1a67722cafa5c1da3e85c6492a5383f127c86c4c4aa4845867dc4"}, + {file = "protobuf-4.25.4-cp310-abi3-win_amd64.whl", hash = "sha256:ba3d8504116a921af46499471c63a85260c1a5fc23333154a427a310e015d26d"}, + {file = "protobuf-4.25.4-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:eecd41bfc0e4b1bd3fa7909ed93dd14dd5567b98c941d6c1ad08fdcab3d6884b"}, + {file = "protobuf-4.25.4-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:4c8a70fdcb995dcf6c8966cfa3a29101916f7225e9afe3ced4395359955d3835"}, + {file = "protobuf-4.25.4-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:3319e073562e2515c6ddc643eb92ce20809f5d8f10fead3332f71c63be6a7040"}, + {file = "protobuf-4.25.4-cp38-cp38-win32.whl", hash = "sha256:7e372cbbda66a63ebca18f8ffaa6948455dfecc4e9c1029312f6c2edcd86c4e1"}, + {file = "protobuf-4.25.4-cp38-cp38-win_amd64.whl", hash = "sha256:051e97ce9fa6067a4546e75cb14f90cf0232dcb3e3d508c448b8d0e4265b61c1"}, + {file = "protobuf-4.25.4-cp39-cp39-win32.whl", hash = "sha256:90bf6fd378494eb698805bbbe7afe6c5d12c8e17fca817a646cd6a1818c696ca"}, + {file = "protobuf-4.25.4-cp39-cp39-win_amd64.whl", hash = "sha256:ac79a48d6b99dfed2729ccccee547b34a1d3d63289c71cef056653a846a2240f"}, + {file = "protobuf-4.25.4-py3-none-any.whl", hash = "sha256:bfbebc1c8e4793cfd58589acfb8a1026be0003e852b9da7db5a4285bde996978"}, + {file = "protobuf-4.25.4.tar.gz", hash = "sha256:0dc4a62cc4052a036ee2204d26fe4d835c62827c855c8a03f29fe6da146b380d"}, ] [[package]] @@ -2304,109 +1624,122 @@ pyasn1 = ">=0.4.6,<0.7.0" [[package]] name = "pydantic" -version = "2.7.4" +version = "2.8.2" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.7.4-py3-none-any.whl", hash = "sha256:ee8538d41ccb9c0a9ad3e0e5f07bf15ed8015b481ced539a1759d8cc89ae90d0"}, - {file = "pydantic-2.7.4.tar.gz", hash = "sha256:0c84efd9548d545f63ac0060c1e4d39bb9b14db8b3c0652338aecc07b5adec52"}, + {file = "pydantic-2.8.2-py3-none-any.whl", hash = "sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8"}, + {file = "pydantic-2.8.2.tar.gz", hash = "sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.18.4" -typing-extensions = ">=4.6.1" +pydantic-core = "2.20.1" +typing-extensions = [ + {version = ">=4.6.1", markers = "python_version < \"3.13\""}, + {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, +] [package.extras] email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.18.4" +version = "2.20.1" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files 
= [ - {file = "pydantic_core-2.18.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:f76d0ad001edd426b92233d45c746fd08f467d56100fd8f30e9ace4b005266e4"}, - {file = "pydantic_core-2.18.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:59ff3e89f4eaf14050c8022011862df275b552caef8082e37b542b066ce1ff26"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a55b5b16c839df1070bc113c1f7f94a0af4433fcfa1b41799ce7606e5c79ce0a"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4d0dcc59664fcb8974b356fe0a18a672d6d7cf9f54746c05f43275fc48636851"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8951eee36c57cd128f779e641e21eb40bc5073eb28b2d23f33eb0ef14ffb3f5d"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4701b19f7e3a06ea655513f7938de6f108123bf7c86bbebb1196eb9bd35cf724"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e00a3f196329e08e43d99b79b286d60ce46bed10f2280d25a1718399457e06be"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:97736815b9cc893b2b7f663628e63f436018b75f44854c8027040e05230eeddb"}, - {file = "pydantic_core-2.18.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6891a2ae0e8692679c07728819b6e2b822fb30ca7445f67bbf6509b25a96332c"}, - {file = "pydantic_core-2.18.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bc4ff9805858bd54d1a20efff925ccd89c9d2e7cf4986144b30802bf78091c3e"}, - {file = "pydantic_core-2.18.4-cp310-none-win32.whl", hash = "sha256:1b4de2e51bbcb61fdebd0ab86ef28062704f62c82bbf4addc4e37fa4b00b7cbc"}, - {file = "pydantic_core-2.18.4-cp310-none-win_amd64.whl", hash = "sha256:6a750aec7bf431517a9fd78cb93c97b9b0c496090fee84a47a0d23668976b4b0"}, - {file = "pydantic_core-2.18.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:942ba11e7dfb66dc70f9ae66b33452f51ac7bb90676da39a7345e99ffb55402d"}, - {file = "pydantic_core-2.18.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b2ebef0e0b4454320274f5e83a41844c63438fdc874ea40a8b5b4ecb7693f1c4"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a642295cd0c8df1b86fc3dced1d067874c353a188dc8e0f744626d49e9aa51c4"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f09baa656c904807e832cf9cce799c6460c450c4ad80803517032da0cd062e2"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98906207f29bc2c459ff64fa007afd10a8c8ac080f7e4d5beff4c97086a3dabd"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19894b95aacfa98e7cb093cd7881a0c76f55731efad31073db4521e2b6ff5b7d"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fbbdc827fe5e42e4d196c746b890b3d72876bdbf160b0eafe9f0334525119c8"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f85d05aa0918283cf29a30b547b4df2fbb56b45b135f9e35b6807cb28bc47951"}, - {file = "pydantic_core-2.18.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e85637bc8fe81ddb73fda9e56bab24560bdddfa98aa64f87aaa4e4b6730c23d2"}, - {file = "pydantic_core-2.18.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:2f5966897e5461f818e136b8451d0551a2e77259eb0f73a837027b47dc95dab9"}, - {file = "pydantic_core-2.18.4-cp311-none-win32.whl", hash = "sha256:44c7486a4228413c317952e9d89598bcdfb06399735e49e0f8df643e1ccd0558"}, - {file = "pydantic_core-2.18.4-cp311-none-win_amd64.whl", hash = "sha256:8a7164fe2005d03c64fd3b85649891cd4953a8de53107940bf272500ba8a788b"}, - {file = "pydantic_core-2.18.4-cp311-none-win_arm64.whl", hash = "sha256:4e99bc050fe65c450344421017f98298a97cefc18c53bb2f7b3531eb39bc7805"}, - {file = "pydantic_core-2.18.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:6f5c4d41b2771c730ea1c34e458e781b18cc668d194958e0112455fff4e402b2"}, - {file = "pydantic_core-2.18.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2fdf2156aa3d017fddf8aea5adfba9f777db1d6022d392b682d2a8329e087cef"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4748321b5078216070b151d5271ef3e7cc905ab170bbfd27d5c83ee3ec436695"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:847a35c4d58721c5dc3dba599878ebbdfd96784f3fb8bb2c356e123bdcd73f34"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c40d4eaad41f78e3bbda31b89edc46a3f3dc6e171bf0ecf097ff7a0ffff7cb1"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:21a5e440dbe315ab9825fcd459b8814bb92b27c974cbc23c3e8baa2b76890077"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01dd777215e2aa86dfd664daed5957704b769e726626393438f9c87690ce78c3"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4b06beb3b3f1479d32befd1f3079cc47b34fa2da62457cdf6c963393340b56e9"}, - {file = "pydantic_core-2.18.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:564d7922e4b13a16b98772441879fcdcbe82ff50daa622d681dd682175ea918c"}, - {file = "pydantic_core-2.18.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0eb2a4f660fcd8e2b1c90ad566db2b98d7f3f4717c64fe0a83e0adb39766d5b8"}, - {file = "pydantic_core-2.18.4-cp312-none-win32.whl", hash = "sha256:8b8bab4c97248095ae0c4455b5a1cd1cdd96e4e4769306ab19dda135ea4cdb07"}, - {file = "pydantic_core-2.18.4-cp312-none-win_amd64.whl", hash = "sha256:14601cdb733d741b8958224030e2bfe21a4a881fb3dd6fbb21f071cabd48fa0a"}, - {file = "pydantic_core-2.18.4-cp312-none-win_arm64.whl", hash = "sha256:c1322d7dd74713dcc157a2b7898a564ab091ca6c58302d5c7b4c07296e3fd00f"}, - {file = "pydantic_core-2.18.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:823be1deb01793da05ecb0484d6c9e20baebb39bd42b5d72636ae9cf8350dbd2"}, - {file = "pydantic_core-2.18.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ebef0dd9bf9b812bf75bda96743f2a6c5734a02092ae7f721c048d156d5fabae"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae1d6df168efb88d7d522664693607b80b4080be6750c913eefb77e34c12c71a"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f9899c94762343f2cc2fc64c13e7cae4c3cc65cdfc87dd810a31654c9b7358cc"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99457f184ad90235cfe8461c4d70ab7dd2680e28821c29eca00252ba90308c78"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:18f469a3d2a2fdafe99296a87e8a4c37748b5080a26b806a707f25a902c040a8"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7cdf28938ac6b8b49ae5e92f2735056a7ba99c9b110a474473fd71185c1af5d"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:938cb21650855054dc54dfd9120a851c974f95450f00683399006aa6e8abb057"}, - {file = "pydantic_core-2.18.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:44cd83ab6a51da80fb5adbd9560e26018e2ac7826f9626bc06ca3dc074cd198b"}, - {file = "pydantic_core-2.18.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:972658f4a72d02b8abfa2581d92d59f59897d2e9f7e708fdabe922f9087773af"}, - {file = "pydantic_core-2.18.4-cp38-none-win32.whl", hash = "sha256:1d886dc848e60cb7666f771e406acae54ab279b9f1e4143babc9c2258213daa2"}, - {file = "pydantic_core-2.18.4-cp38-none-win_amd64.whl", hash = "sha256:bb4462bd43c2460774914b8525f79b00f8f407c945d50881568f294c1d9b4443"}, - {file = "pydantic_core-2.18.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:44a688331d4a4e2129140a8118479443bd6f1905231138971372fcde37e43528"}, - {file = "pydantic_core-2.18.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a2fdd81edd64342c85ac7cf2753ccae0b79bf2dfa063785503cb85a7d3593223"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86110d7e1907ab36691f80b33eb2da87d780f4739ae773e5fc83fb272f88825f"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:46387e38bd641b3ee5ce247563b60c5ca098da9c56c75c157a05eaa0933ed154"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:123c3cec203e3f5ac7b000bd82235f1a3eced8665b63d18be751f115588fea30"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dc1803ac5c32ec324c5261c7209e8f8ce88e83254c4e1aebdc8b0a39f9ddb443"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53db086f9f6ab2b4061958d9c276d1dbe3690e8dd727d6abf2321d6cce37fa94"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:abc267fa9837245cc28ea6929f19fa335f3dc330a35d2e45509b6566dc18be23"}, - {file = "pydantic_core-2.18.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a0d829524aaefdebccb869eed855e2d04c21d2d7479b6cada7ace5448416597b"}, - {file = "pydantic_core-2.18.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:509daade3b8649f80d4e5ff21aa5673e4ebe58590b25fe42fac5f0f52c6f034a"}, - {file = "pydantic_core-2.18.4-cp39-none-win32.whl", hash = "sha256:ca26a1e73c48cfc54c4a76ff78df3727b9d9f4ccc8dbee4ae3f73306a591676d"}, - {file = "pydantic_core-2.18.4-cp39-none-win_amd64.whl", hash = "sha256:c67598100338d5d985db1b3d21f3619ef392e185e71b8d52bceacc4a7771ea7e"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:574d92eac874f7f4db0ca653514d823a0d22e2354359d0759e3f6a406db5d55d"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1f4d26ceb5eb9eed4af91bebeae4b06c3fb28966ca3a8fb765208cf6b51102ab"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77450e6d20016ec41f43ca4a6c63e9fdde03f0ae3fe90e7c27bdbeaece8b1ed4"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d323a01da91851a4f17bf592faf46149c9169d68430b3146dcba2bb5e5719abc"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43d447dd2ae072a0065389092a231283f62d960030ecd27565672bd40746c507"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:578e24f761f3b425834f297b9935e1ce2e30f51400964ce4801002435a1b41ef"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:81b5efb2f126454586d0f40c4d834010979cb80785173d1586df845a632e4e6d"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ab86ce7c8f9bea87b9d12c7f0af71102acbf5ecbc66c17796cff45dae54ef9a5"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:90afc12421df2b1b4dcc975f814e21bc1754640d502a2fbcc6d41e77af5ec312"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:51991a89639a912c17bef4b45c87bd83593aee0437d8102556af4885811d59f5"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:293afe532740370aba8c060882f7d26cfd00c94cae32fd2e212a3a6e3b7bc15e"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b48ece5bde2e768197a2d0f6e925f9d7e3e826f0ad2271120f8144a9db18d5c8"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:eae237477a873ab46e8dd748e515c72c0c804fb380fbe6c85533c7de51f23a8f"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:834b5230b5dfc0c1ec37b2fda433b271cbbc0e507560b5d1588e2cc1148cf1ce"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e858ac0a25074ba4bce653f9b5d0a85b7456eaddadc0ce82d3878c22489fa4ee"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2fd41f6eff4c20778d717af1cc50eca52f5afe7805ee530a4fbd0bae284f16e9"}, - {file = "pydantic_core-2.18.4.tar.gz", hash = "sha256:ec3beeada09ff865c344ff3bc2f427f5e6c26401cc6113d77e372c3fdac73864"}, + {file = "pydantic_core-2.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3"}, + {file = "pydantic_core-2.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a"}, + {file = 
"pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840"}, + {file = "pydantic_core-2.20.1-cp310-none-win32.whl", hash = "sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250"}, + {file = "pydantic_core-2.20.1-cp310-none-win_amd64.whl", hash = "sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c"}, + {file = "pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312"}, + {file = "pydantic_core-2.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b"}, + {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27"}, + {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b"}, + {file = "pydantic_core-2.20.1-cp311-none-win32.whl", hash = "sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a"}, + {file = "pydantic_core-2.20.1-cp311-none-win_amd64.whl", hash = "sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2"}, + {file = "pydantic_core-2.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231"}, + {file = "pydantic_core-2.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe"}, + {file = 
"pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24"}, + {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1"}, + {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd"}, + {file = "pydantic_core-2.20.1-cp312-none-win32.whl", hash = "sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688"}, + {file = "pydantic_core-2.20.1-cp312-none-win_amd64.whl", hash = "sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d"}, + {file = "pydantic_core-2.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686"}, + {file = "pydantic_core-2.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83"}, + {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203"}, + {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0"}, + {file = "pydantic_core-2.20.1-cp313-none-win32.whl", hash = "sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e"}, + {file = "pydantic_core-2.20.1-cp313-none-win_amd64.whl", hash = "sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20"}, + {file = "pydantic_core-2.20.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4745f4ac52cc6686390c40eaa01d48b18997cb130833154801a442323cc78f91"}, + {file = "pydantic_core-2.20.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a8ad4c766d3f33ba8fd692f9aa297c9058970530a32c728a2c4bfd2616d3358b"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41e81317dd6a0127cabce83c0c9c3fbecceae981c8391e6f1dec88a77c8a569a"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04024d270cf63f586ad41fff13fde4311c4fc13ea74676962c876d9577bcc78f"}, + {file = 
"pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eaad4ff2de1c3823fddf82f41121bdf453d922e9a238642b1dedb33c4e4f98ad"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26ab812fa0c845df815e506be30337e2df27e88399b985d0bb4e3ecfe72df31c"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c5ebac750d9d5f2706654c638c041635c385596caf68f81342011ddfa1e5598"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2aafc5a503855ea5885559eae883978c9b6d8c8993d67766ee73d82e841300dd"}, + {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4868f6bd7c9d98904b748a2653031fc9c2f85b6237009d475b1008bfaeb0a5aa"}, + {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa2f457b4af386254372dfa78a2eda2563680d982422641a85f271c859df1987"}, + {file = "pydantic_core-2.20.1-cp38-none-win32.whl", hash = "sha256:225b67a1f6d602de0ce7f6c1c3ae89a4aa25d3de9be857999e9124f15dab486a"}, + {file = "pydantic_core-2.20.1-cp38-none-win_amd64.whl", hash = "sha256:6b507132dcfc0dea440cce23ee2182c0ce7aba7054576efc65634f080dbe9434"}, + {file = "pydantic_core-2.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c"}, + {file = "pydantic_core-2.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1"}, + {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09"}, + {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab"}, + {file = "pydantic_core-2.20.1-cp39-none-win32.whl", hash = "sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2"}, + {file = "pydantic_core-2.20.1-cp39-none-win_amd64.whl", hash = "sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94"}, + {file = 
"pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"}, + {file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"}, ] [package.dependencies] @@ -2558,177 +1891,77 @@ files = [ [package.extras] cli = ["click (>=5.0)"] -[[package]] -name = "python-multipart" -version = "0.0.9" -description = "A streaming multipart parser for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "python_multipart-0.0.9-py3-none-any.whl", hash = "sha256:97ca7b8ea7b05f977dc3849c3ba99d51689822fab725c3703af7c866a0c2b215"}, - {file = "python_multipart-0.0.9.tar.gz", hash = "sha256:03f54688c663f1b7977105f021043b0793151e4cb1c1a9d4a11fc13d622c4026"}, -] - -[package.extras] -dev = ["atomicwrites (==1.4.1)", "attrs (==23.2.0)", "coverage (==7.4.1)", "hatch", "invoke (==2.2.0)", "more-itertools (==10.2.0)", "pbr (==6.0.0)", "pluggy (==1.4.0)", "py (==1.11.0)", "pytest (==8.0.0)", "pytest-cov (==4.1.0)", "pytest-timeout (==2.2.0)", "pyyaml (==6.0.1)", "ruff (==0.2.1)"] - [[package]] name = "pyyaml" -version = "6.0.1" +version = "6.0.2" description = "YAML parser and emitter for Python" optional = false -python-versions = ">=3.6" -files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = 
"PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", 
hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, -] - -[[package]] -name = "regex" -version = "2024.5.15" -description = "Alternative regular expression module, to replace re." -optional = false python-versions = ">=3.8" files = [ - {file = "regex-2024.5.15-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a81e3cfbae20378d75185171587cbf756015ccb14840702944f014e0d93ea09f"}, - {file = "regex-2024.5.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7b59138b219ffa8979013be7bc85bb60c6f7b7575df3d56dc1e403a438c7a3f6"}, - {file = "regex-2024.5.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0bd000c6e266927cb7a1bc39d55be95c4b4f65c5be53e659537537e019232b1"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5eaa7ddaf517aa095fa8da0b5015c44d03da83f5bd49c87961e3c997daed0de7"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba68168daedb2c0bab7fd7e00ced5ba90aebf91024dea3c88ad5063c2a562cca"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6e8d717bca3a6e2064fc3a08df5cbe366369f4b052dcd21b7416e6d71620dca1"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1337b7dbef9b2f71121cdbf1e97e40de33ff114801263b275aafd75303bd62b5"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9ebd0a36102fcad2f03696e8af4ae682793a5d30b46c647eaf280d6cfb32796"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9efa1a32ad3a3ea112224897cdaeb6aa00381627f567179c0314f7b65d354c62"}, - {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:1595f2d10dff3d805e054ebdc41c124753631b6a471b976963c7b28543cf13b0"}, - {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b802512f3e1f480f41ab5f2cfc0e2f761f08a1f41092d6718868082fc0d27143"}, - {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:a0981022dccabca811e8171f913de05720590c915b033b7e601f35ce4ea7019f"}, - {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:19068a6a79cf99a19ccefa44610491e9ca02c2be3305c7760d3831d38a467a6f"}, - {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1b5269484f6126eee5e687785e83c6b60aad7663dafe842b34691157e5083e53"}, - {file = "regex-2024.5.15-cp310-cp310-win32.whl", hash = "sha256:ada150c5adfa8fbcbf321c30c751dc67d2f12f15bd183ffe4ec7cde351d945b3"}, - {file = "regex-2024.5.15-cp310-cp310-win_amd64.whl", hash = "sha256:ac394ff680fc46b97487941f5e6ae49a9f30ea41c6c6804832063f14b2a5a145"}, - {file = "regex-2024.5.15-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f5b1dff3ad008dccf18e652283f5e5339d70bf8ba7c98bf848ac33db10f7bc7a"}, - {file = "regex-2024.5.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c6a2b494a76983df8e3d3feea9b9ffdd558b247e60b92f877f93a1ff43d26656"}, - {file = 
"regex-2024.5.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a32b96f15c8ab2e7d27655969a23895eb799de3665fa94349f3b2fbfd547236f"}, - {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10002e86e6068d9e1c91eae8295ef690f02f913c57db120b58fdd35a6bb1af35"}, - {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ec54d5afa89c19c6dd8541a133be51ee1017a38b412b1321ccb8d6ddbeb4cf7d"}, - {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10e4ce0dca9ae7a66e6089bb29355d4432caed736acae36fef0fdd7879f0b0cb"}, - {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e507ff1e74373c4d3038195fdd2af30d297b4f0950eeda6f515ae3d84a1770f"}, - {file = "regex-2024.5.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1f059a4d795e646e1c37665b9d06062c62d0e8cc3c511fe01315973a6542e40"}, - {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0721931ad5fe0dda45d07f9820b90b2148ccdd8e45bb9e9b42a146cb4f695649"}, - {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:833616ddc75ad595dee848ad984d067f2f31be645d603e4d158bba656bbf516c"}, - {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:287eb7f54fc81546346207c533ad3c2c51a8d61075127d7f6d79aaf96cdee890"}, - {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:19dfb1c504781a136a80ecd1fff9f16dddf5bb43cec6871778c8a907a085bb3d"}, - {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:119af6e56dce35e8dfb5222573b50c89e5508d94d55713c75126b753f834de68"}, - {file = "regex-2024.5.15-cp311-cp311-win32.whl", hash = "sha256:1c1c174d6ec38d6c8a7504087358ce9213d4332f6293a94fbf5249992ba54efa"}, - {file = "regex-2024.5.15-cp311-cp311-win_amd64.whl", hash = "sha256:9e717956dcfd656f5055cc70996ee2cc82ac5149517fc8e1b60261b907740201"}, - {file = "regex-2024.5.15-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:632b01153e5248c134007209b5c6348a544ce96c46005d8456de1d552455b014"}, - {file = "regex-2024.5.15-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e64198f6b856d48192bf921421fdd8ad8eb35e179086e99e99f711957ffedd6e"}, - {file = "regex-2024.5.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68811ab14087b2f6e0fc0c2bae9ad689ea3584cad6917fc57be6a48bbd012c49"}, - {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8ec0c2fea1e886a19c3bee0cd19d862b3aa75dcdfb42ebe8ed30708df64687a"}, - {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d0c0c0003c10f54a591d220997dd27d953cd9ccc1a7294b40a4be5312be8797b"}, - {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2431b9e263af1953c55abbd3e2efca67ca80a3de8a0437cb58e2421f8184717a"}, - {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a605586358893b483976cffc1723fb0f83e526e8f14c6e6614e75919d9862cf"}, - {file = "regex-2024.5.15-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:391d7f7f1e409d192dba8bcd42d3e4cf9e598f3979cdaed6ab11288da88cb9f2"}, - {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9ff11639a8d98969c863d4617595eb5425fd12f7c5ef6621a4b74b71ed8726d5"}, - {file = 
"regex-2024.5.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4eee78a04e6c67e8391edd4dad3279828dd66ac4b79570ec998e2155d2e59fd5"}, - {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8fe45aa3f4aa57faabbc9cb46a93363edd6197cbc43523daea044e9ff2fea83e"}, - {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:d0a3d8d6acf0c78a1fff0e210d224b821081330b8524e3e2bc5a68ef6ab5803d"}, - {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c486b4106066d502495b3025a0a7251bf37ea9540433940a23419461ab9f2a80"}, - {file = "regex-2024.5.15-cp312-cp312-win32.whl", hash = "sha256:c49e15eac7c149f3670b3e27f1f28a2c1ddeccd3a2812cba953e01be2ab9b5fe"}, - {file = "regex-2024.5.15-cp312-cp312-win_amd64.whl", hash = "sha256:673b5a6da4557b975c6c90198588181029c60793835ce02f497ea817ff647cb2"}, - {file = "regex-2024.5.15-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:87e2a9c29e672fc65523fb47a90d429b70ef72b901b4e4b1bd42387caf0d6835"}, - {file = "regex-2024.5.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c3bea0ba8b73b71b37ac833a7f3fd53825924165da6a924aec78c13032f20850"}, - {file = "regex-2024.5.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bfc4f82cabe54f1e7f206fd3d30fda143f84a63fe7d64a81558d6e5f2e5aaba9"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5bb9425fe881d578aeca0b2b4b3d314ec88738706f66f219c194d67179337cb"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:64c65783e96e563103d641760664125e91bd85d8e49566ee560ded4da0d3e704"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cf2430df4148b08fb4324b848672514b1385ae3807651f3567871f130a728cc3"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5397de3219a8b08ae9540c48f602996aa6b0b65d5a61683e233af8605c42b0f2"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:455705d34b4154a80ead722f4f185b04c4237e8e8e33f265cd0798d0e44825fa"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b2b6f1b3bb6f640c1a92be3bbfbcb18657b125b99ecf141fb3310b5282c7d4ed"}, - {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:3ad070b823ca5890cab606c940522d05d3d22395d432f4aaaf9d5b1653e47ced"}, - {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:5b5467acbfc153847d5adb21e21e29847bcb5870e65c94c9206d20eb4e99a384"}, - {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:e6662686aeb633ad65be2a42b4cb00178b3fbf7b91878f9446075c404ada552f"}, - {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:2b4c884767504c0e2401babe8b5b7aea9148680d2e157fa28f01529d1f7fcf67"}, - {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:3cd7874d57f13bf70078f1ff02b8b0aa48d5b9ed25fc48547516c6aba36f5741"}, - {file = "regex-2024.5.15-cp38-cp38-win32.whl", hash = "sha256:e4682f5ba31f475d58884045c1a97a860a007d44938c4c0895f41d64481edbc9"}, - {file = "regex-2024.5.15-cp38-cp38-win_amd64.whl", hash = "sha256:d99ceffa25ac45d150e30bd9ed14ec6039f2aad0ffa6bb87a5936f5782fc1569"}, - {file = "regex-2024.5.15-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:13cdaf31bed30a1e1c2453ef6015aa0983e1366fad2667657dbcac7b02f67133"}, - {file = 
"regex-2024.5.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cac27dcaa821ca271855a32188aa61d12decb6fe45ffe3e722401fe61e323cd1"}, - {file = "regex-2024.5.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7dbe2467273b875ea2de38ded4eba86cbcbc9a1a6d0aa11dcf7bd2e67859c435"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64f18a9a3513a99c4bef0e3efd4c4a5b11228b48aa80743be822b71e132ae4f5"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d347a741ea871c2e278fde6c48f85136c96b8659b632fb57a7d1ce1872547600"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1878b8301ed011704aea4c806a3cadbd76f84dece1ec09cc9e4dc934cfa5d4da"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4babf07ad476aaf7830d77000874d7611704a7fcf68c9c2ad151f5d94ae4bfc4"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:35cb514e137cb3488bce23352af3e12fb0dbedd1ee6e60da053c69fb1b29cc6c"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cdd09d47c0b2efee9378679f8510ee6955d329424c659ab3c5e3a6edea696294"}, - {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:72d7a99cd6b8f958e85fc6ca5b37c4303294954eac1376535b03c2a43eb72629"}, - {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:a094801d379ab20c2135529948cb84d417a2169b9bdceda2a36f5f10977ebc16"}, - {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:c0c18345010870e58238790a6779a1219b4d97bd2e77e1140e8ee5d14df071aa"}, - {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:16093f563098448ff6b1fa68170e4acbef94e6b6a4e25e10eae8598bb1694b5d"}, - {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e38a7d4e8f633a33b4c7350fbd8bad3b70bf81439ac67ac38916c4a86b465456"}, - {file = "regex-2024.5.15-cp39-cp39-win32.whl", hash = "sha256:71a455a3c584a88f654b64feccc1e25876066c4f5ef26cd6dd711308aa538694"}, - {file = "regex-2024.5.15-cp39-cp39-win_amd64.whl", hash = "sha256:cab12877a9bdafde5500206d1020a584355a97884dfd388af3699e9137bf7388"}, - {file = "regex-2024.5.15.tar.gz", hash = "sha256:d3ee02d9e5f482cc8309134a91eeaacbdd2261ba111b0fef3748eeb4913e6a2c"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = 
"PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] [[package]] name = "requests" -version = "2.31.0" +version = "2.32.3" description = "Python HTTP for Humans." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, ] [package.dependencies] @@ -2794,46 +2027,46 @@ pyasn1 = ">=0.1.3" [[package]] name = "ruff" -version = "0.5.0" +version = "0.5.7" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.5.0-py3-none-linux_armv6l.whl", hash = "sha256:ee770ea8ab38918f34e7560a597cc0a8c9a193aaa01bfbd879ef43cb06bd9c4c"}, - {file = "ruff-0.5.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:38f3b8327b3cb43474559d435f5fa65dacf723351c159ed0dc567f7ab735d1b6"}, - {file = "ruff-0.5.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:7594f8df5404a5c5c8f64b8311169879f6cf42142da644c7e0ba3c3f14130370"}, - {file = "ruff-0.5.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:adc7012d6ec85032bc4e9065110df205752d64010bed5f958d25dbee9ce35de3"}, - {file = "ruff-0.5.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d505fb93b0fabef974b168d9b27c3960714d2ecda24b6ffa6a87ac432905ea38"}, - {file = "ruff-0.5.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9dc5cfd3558f14513ed0d5b70ce531e28ea81a8a3b1b07f0f48421a3d9e7d80a"}, - {file = "ruff-0.5.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:db3ca35265de239a1176d56a464b51557fce41095c37d6c406e658cf80bbb362"}, - {file = "ruff-0.5.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b1a321c4f68809fddd9b282fab6a8d8db796b270fff44722589a8b946925a2a8"}, - {file = "ruff-0.5.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2c4dfcd8d34b143916994b3876b63d53f56724c03f8c1a33a253b7b1e6bf2a7d"}, - {file = "ruff-0.5.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81e5facfc9f4a674c6a78c64d38becfbd5e4f739c31fcd9ce44c849f1fad9e4c"}, - {file = "ruff-0.5.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e589e27971c2a3efff3fadafb16e5aef7ff93250f0134ec4b52052b673cf988d"}, - {file = "ruff-0.5.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d2ffbc3715a52b037bcb0f6ff524a9367f642cdc5817944f6af5479bbb2eb50e"}, - {file = "ruff-0.5.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:cd096e23c6a4f9c819525a437fa0a99d1c67a1b6bb30948d46f33afbc53596cf"}, - {file = "ruff-0.5.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:46e193b36f2255729ad34a49c9a997d506e58f08555366b2108783b3064a0e1e"}, - {file = "ruff-0.5.0-py3-none-win32.whl", hash = "sha256:49141d267100f5ceff541b4e06552e98527870eafa1acc9dec9139c9ec5af64c"}, - {file = "ruff-0.5.0-py3-none-win_amd64.whl", hash = "sha256:e9118f60091047444c1b90952736ee7b1792910cab56e9b9a9ac20af94cd0440"}, - {file = "ruff-0.5.0-py3-none-win_arm64.whl", hash = "sha256:ed5c4df5c1fb4518abcb57725b576659542bdbe93366f4f329e8f398c4b71178"}, - {file = "ruff-0.5.0.tar.gz", hash = "sha256:eb641b5873492cf9bd45bc9c5ae5320648218e04386a5f0c264ad6ccce8226a1"}, + {file = "ruff-0.5.7-py3-none-linux_armv6l.whl", hash = 
"sha256:548992d342fc404ee2e15a242cdbea4f8e39a52f2e7752d0e4cbe88d2d2f416a"}, + {file = "ruff-0.5.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:00cc8872331055ee017c4f1071a8a31ca0809ccc0657da1d154a1d2abac5c0be"}, + {file = "ruff-0.5.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:eaf3d86a1fdac1aec8a3417a63587d93f906c678bb9ed0b796da7b59c1114a1e"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a01c34400097b06cf8a6e61b35d6d456d5bd1ae6961542de18ec81eaf33b4cb8"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcc8054f1a717e2213500edaddcf1dbb0abad40d98e1bd9d0ad364f75c763eea"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f70284e73f36558ef51602254451e50dd6cc479f8b6f8413a95fcb5db4a55fc"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:a78ad870ae3c460394fc95437d43deb5c04b5c29297815a2a1de028903f19692"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ccd078c66a8e419475174bfe60a69adb36ce04f8d4e91b006f1329d5cd44bcf"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e31c9bad4ebf8fdb77b59cae75814440731060a09a0e0077d559a556453acbb"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d796327eed8e168164346b769dd9a27a70e0298d667b4ecee6877ce8095ec8e"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:4a09ea2c3f7778cc635e7f6edf57d566a8ee8f485f3c4454db7771efb692c499"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a36d8dcf55b3a3bc353270d544fb170d75d2dff41eba5df57b4e0b67a95bb64e"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9369c218f789eefbd1b8d82a8cf25017b523ac47d96b2f531eba73770971c9e5"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b88ca3db7eb377eb24fb7c82840546fb7acef75af4a74bd36e9ceb37a890257e"}, + {file = "ruff-0.5.7-py3-none-win32.whl", hash = "sha256:33d61fc0e902198a3e55719f4be6b375b28f860b09c281e4bdbf783c0566576a"}, + {file = "ruff-0.5.7-py3-none-win_amd64.whl", hash = "sha256:083bbcbe6fadb93cd86709037acc510f86eed5a314203079df174c40bbbca6b3"}, + {file = "ruff-0.5.7-py3-none-win_arm64.whl", hash = "sha256:2dca26154ff9571995107221d0aeaad0e75a77b5a682d6236cf89a58c70b76f4"}, + {file = "ruff-0.5.7.tar.gz", hash = "sha256:8dfc0a458797f5d9fb622dd0efc52d796f23f0a1493a9527f4e49a550ae9a7e5"}, ] [[package]] name = "setuptools" -version = "69.5.1" +version = "73.0.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.5.1-py3-none-any.whl", hash = "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"}, - {file = "setuptools-69.5.1.tar.gz", hash = "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987"}, + {file = "setuptools-73.0.1-py3-none-any.whl", hash = "sha256:b208925fcb9f7af924ed2dc04708ea89791e24bde0d3020b27df0e116088b34e"}, + {file = "setuptools-73.0.1.tar.gz", hash = "sha256:d59a3e788ab7e012ab2c4baed1b376da6366883ee20d7a5fc426816e3d7b1193"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", 
"sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.11.*)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (<0.4)", "pytest-ruff (>=0.2.1)", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] [[package]] name = "shellingham" @@ -2868,102 +2101,15 @@ files = [ {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, ] -[[package]] -name = "sqlalchemy" -version = "2.0.30" -description = "Database Abstraction Library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "SQLAlchemy-2.0.30-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3b48154678e76445c7ded1896715ce05319f74b1e73cf82d4f8b59b46e9c0ddc"}, - {file = "SQLAlchemy-2.0.30-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2753743c2afd061bb95a61a51bbb6a1a11ac1c44292fad898f10c9839a7f75b2"}, - {file = "SQLAlchemy-2.0.30-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7bfc726d167f425d4c16269a9a10fe8630ff6d14b683d588044dcef2d0f6be7"}, - {file = "SQLAlchemy-2.0.30-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4f61ada6979223013d9ab83a3ed003ded6959eae37d0d685db2c147e9143797"}, - {file = "SQLAlchemy-2.0.30-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3a365eda439b7a00732638f11072907c1bc8e351c7665e7e5da91b169af794af"}, - {file = "SQLAlchemy-2.0.30-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bba002a9447b291548e8d66fd8c96a6a7ed4f2def0bb155f4f0a1309fd2735d5"}, - {file = "SQLAlchemy-2.0.30-cp310-cp310-win32.whl", hash = "sha256:0138c5c16be3600923fa2169532205d18891b28afa817cb49b50e08f62198bb8"}, - {file = "SQLAlchemy-2.0.30-cp310-cp310-win_amd64.whl", hash = "sha256:99650e9f4cf3ad0d409fed3eec4f071fadd032e9a5edc7270cd646a26446feeb"}, - {file = 
"SQLAlchemy-2.0.30-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:955991a09f0992c68a499791a753523f50f71a6885531568404fa0f231832aa0"}, - {file = "SQLAlchemy-2.0.30-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f69e4c756ee2686767eb80f94c0125c8b0a0b87ede03eacc5c8ae3b54b99dc46"}, - {file = "SQLAlchemy-2.0.30-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69c9db1ce00e59e8dd09d7bae852a9add716efdc070a3e2068377e6ff0d6fdaa"}, - {file = "SQLAlchemy-2.0.30-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1429a4b0f709f19ff3b0cf13675b2b9bfa8a7e79990003207a011c0db880a13"}, - {file = "SQLAlchemy-2.0.30-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:efedba7e13aa9a6c8407c48facfdfa108a5a4128e35f4c68f20c3407e4376aa9"}, - {file = "SQLAlchemy-2.0.30-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:16863e2b132b761891d6c49f0a0f70030e0bcac4fd208117f6b7e053e68668d0"}, - {file = "SQLAlchemy-2.0.30-cp311-cp311-win32.whl", hash = "sha256:2ecabd9ccaa6e914e3dbb2aa46b76dede7eadc8cbf1b8083c94d936bcd5ffb49"}, - {file = "SQLAlchemy-2.0.30-cp311-cp311-win_amd64.whl", hash = "sha256:0b3f4c438e37d22b83e640f825ef0f37b95db9aa2d68203f2c9549375d0b2260"}, - {file = "SQLAlchemy-2.0.30-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5a79d65395ac5e6b0c2890935bad892eabb911c4aa8e8015067ddb37eea3d56c"}, - {file = "SQLAlchemy-2.0.30-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9a5baf9267b752390252889f0c802ea13b52dfee5e369527da229189b8bd592e"}, - {file = "SQLAlchemy-2.0.30-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cb5a646930c5123f8461f6468901573f334c2c63c795b9af350063a736d0134"}, - {file = "SQLAlchemy-2.0.30-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:296230899df0b77dec4eb799bcea6fbe39a43707ce7bb166519c97b583cfcab3"}, - {file = "SQLAlchemy-2.0.30-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c62d401223f468eb4da32627bffc0c78ed516b03bb8a34a58be54d618b74d472"}, - {file = "SQLAlchemy-2.0.30-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3b69e934f0f2b677ec111b4d83f92dc1a3210a779f69bf905273192cf4ed433e"}, - {file = "SQLAlchemy-2.0.30-cp312-cp312-win32.whl", hash = "sha256:77d2edb1f54aff37e3318f611637171e8ec71472f1fdc7348b41dcb226f93d90"}, - {file = "SQLAlchemy-2.0.30-cp312-cp312-win_amd64.whl", hash = "sha256:b6c7ec2b1f4969fc19b65b7059ed00497e25f54069407a8701091beb69e591a5"}, - {file = "SQLAlchemy-2.0.30-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5a8e3b0a7e09e94be7510d1661339d6b52daf202ed2f5b1f9f48ea34ee6f2d57"}, - {file = "SQLAlchemy-2.0.30-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b60203c63e8f984df92035610c5fb76d941254cf5d19751faab7d33b21e5ddc0"}, - {file = "SQLAlchemy-2.0.30-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1dc3eabd8c0232ee8387fbe03e0a62220a6f089e278b1f0aaf5e2d6210741ad"}, - {file = "SQLAlchemy-2.0.30-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:40ad017c672c00b9b663fcfcd5f0864a0a97828e2ee7ab0c140dc84058d194cf"}, - {file = "SQLAlchemy-2.0.30-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e42203d8d20dc704604862977b1470a122e4892791fe3ed165f041e4bf447a1b"}, - {file = "SQLAlchemy-2.0.30-cp37-cp37m-win32.whl", hash = "sha256:2a4f4da89c74435f2bc61878cd08f3646b699e7d2eba97144030d1be44e27584"}, - {file = "SQLAlchemy-2.0.30-cp37-cp37m-win_amd64.whl", hash = "sha256:b6bf767d14b77f6a18b6982cbbf29d71bede087edae495d11ab358280f304d8e"}, - {file = 
"SQLAlchemy-2.0.30-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc0c53579650a891f9b83fa3cecd4e00218e071d0ba00c4890f5be0c34887ed3"}, - {file = "SQLAlchemy-2.0.30-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:311710f9a2ee235f1403537b10c7687214bb1f2b9ebb52702c5aa4a77f0b3af7"}, - {file = "SQLAlchemy-2.0.30-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:408f8b0e2c04677e9c93f40eef3ab22f550fecb3011b187f66a096395ff3d9fd"}, - {file = "SQLAlchemy-2.0.30-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37a4b4fb0dd4d2669070fb05b8b8824afd0af57587393015baee1cf9890242d9"}, - {file = "SQLAlchemy-2.0.30-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a943d297126c9230719c27fcbbeab57ecd5d15b0bd6bfd26e91bfcfe64220621"}, - {file = "SQLAlchemy-2.0.30-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0a089e218654e740a41388893e090d2e2c22c29028c9d1353feb38638820bbeb"}, - {file = "SQLAlchemy-2.0.30-cp38-cp38-win32.whl", hash = "sha256:fa561138a64f949f3e889eb9ab8c58e1504ab351d6cf55259dc4c248eaa19da6"}, - {file = "SQLAlchemy-2.0.30-cp38-cp38-win_amd64.whl", hash = "sha256:7d74336c65705b986d12a7e337ba27ab2b9d819993851b140efdf029248e818e"}, - {file = "SQLAlchemy-2.0.30-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ae8c62fe2480dd61c532ccafdbce9b29dacc126fe8be0d9a927ca3e699b9491a"}, - {file = "SQLAlchemy-2.0.30-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2383146973a15435e4717f94c7509982770e3e54974c71f76500a0136f22810b"}, - {file = "SQLAlchemy-2.0.30-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8409de825f2c3b62ab15788635ccaec0c881c3f12a8af2b12ae4910a0a9aeef6"}, - {file = "SQLAlchemy-2.0.30-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0094c5dc698a5f78d3d1539853e8ecec02516b62b8223c970c86d44e7a80f6c7"}, - {file = "SQLAlchemy-2.0.30-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:edc16a50f5e1b7a06a2dcc1f2205b0b961074c123ed17ebda726f376a5ab0953"}, - {file = "SQLAlchemy-2.0.30-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f7703c2010355dd28f53deb644a05fc30f796bd8598b43f0ba678878780b6e4c"}, - {file = "SQLAlchemy-2.0.30-cp39-cp39-win32.whl", hash = "sha256:1f9a727312ff6ad5248a4367358e2cf7e625e98b1028b1d7ab7b806b7d757513"}, - {file = "SQLAlchemy-2.0.30-cp39-cp39-win_amd64.whl", hash = "sha256:a0ef36b28534f2a5771191be6edb44cc2673c7b2edf6deac6562400288664221"}, - {file = "SQLAlchemy-2.0.30-py3-none-any.whl", hash = "sha256:7108d569d3990c71e26a42f60474b4c02c8586c4681af5fd67e51a044fdea86a"}, - {file = "SQLAlchemy-2.0.30.tar.gz", hash = "sha256:2b1708916730f4830bc69d6f49d37f7698b5bd7530aca7f04f785f8849e95255"}, -] - -[package.dependencies] -greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} -typing-extensions = ">=4.6.0" - -[package.extras] -aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] -aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] -asyncio = ["greenlet (!=0.4.17)"] -asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] -mssql = ["pyodbc"] -mssql-pymssql = ["pymssql"] -mssql-pyodbc = ["pyodbc"] -mypy = ["mypy (>=0.910)"] -mysql = ["mysqlclient (>=1.4.0)"] -mysql-connector = 
["mysql-connector-python"] -oracle = ["cx_oracle (>=8)"] -oracle-oracledb = ["oracledb (>=1.0.1)"] -postgresql = ["psycopg2 (>=2.7)"] -postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] -postgresql-pg8000 = ["pg8000 (>=1.29.1)"] -postgresql-psycopg = ["psycopg (>=3.0.7)"] -postgresql-psycopg2binary = ["psycopg2-binary"] -postgresql-psycopg2cffi = ["psycopg2cffi"] -postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] -pymysql = ["pymysql"] -sqlcipher = ["sqlcipher3_binary"] - [[package]] name = "starlette" -version = "0.37.2" +version = "0.38.2" description = "The little ASGI library that shines." optional = false python-versions = ">=3.8" files = [ - {file = "starlette-0.37.2-py3-none-any.whl", hash = "sha256:6fe59f29268538e5d0d182f2791a479a0c64638e6935d1c6989e63fb2699c6ee"}, - {file = "starlette-0.37.2.tar.gz", hash = "sha256:9af890290133b79fc3db55474ade20f6220a364a0402e0b556e7cd5e1e093823"}, + {file = "starlette-0.38.2-py3-none-any.whl", hash = "sha256:4ec6a59df6bbafdab5f567754481657f7ed90dc9d69b0c9ff017907dd54faeff"}, + {file = "starlette-0.38.2.tar.gz", hash = "sha256:c7c0441065252160993a1a37cf2a73bb64d271b17303e0b0c1eb7191cfb12d75"}, ] [package.dependencies] @@ -2975,17 +2121,20 @@ full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7 [[package]] name = "sympy" -version = "1.12" +version = "1.13.2" description = "Computer algebra system (CAS) in Python" optional = false python-versions = ">=3.8" files = [ - {file = "sympy-1.12-py3-none-any.whl", hash = "sha256:c3588cd4295d0c0f603d0f2ae780587e64e2efeedb3521e46b9bb1d08d184fa5"}, - {file = "sympy-1.12.tar.gz", hash = "sha256:ebf595c8dac3e0fdc4152c51878b498396ec7f30e7a914d6071e674d49420fb8"}, + {file = "sympy-1.13.2-py3-none-any.whl", hash = "sha256:c51d75517712f1aed280d4ce58506a4a88d635d6b5dd48b39102a7ae1f3fcfe9"}, + {file = "sympy-1.13.2.tar.gz", hash = "sha256:401449d84d07be9d0c7a46a64bd54fe097667d5e7181bfe67ec777be9e01cb13"}, ] [package.dependencies] -mpmath = ">=0.19" +mpmath = ">=1.1.0,<1.4" + +[package.extras] +dev = ["hypothesis (>=6.70.0)", "pytest (>=7.1.0)"] [[package]] name = "syrupy" @@ -3003,178 +2152,126 @@ pytest = ">=7.0.0,<9.0.0" [[package]] name = "tenacity" -version = "8.3.0" +version = "8.5.0" description = "Retry code until it succeeds" optional = false python-versions = ">=3.8" files = [ - {file = "tenacity-8.3.0-py3-none-any.whl", hash = "sha256:3649f6443dbc0d9b01b9d8020a9c4ec7a1ff5f6f3c6c8a036ef371f573fe9185"}, - {file = "tenacity-8.3.0.tar.gz", hash = "sha256:953d4e6ad24357bceffbc9707bc74349aca9d245f68eb65419cf0c249a1949a2"}, + {file = "tenacity-8.5.0-py3-none-any.whl", hash = "sha256:b594c2a5945830c267ce6b79a166228323ed52718f30302c1359836112346687"}, + {file = "tenacity-8.5.0.tar.gz", hash = "sha256:8bc6c0c8a09b31e6cad13c47afbed1a567518250a9a171418582ed8d9c20ca78"}, ] [package.extras] doc = ["reno", "sphinx"] test = ["pytest", "tornado (>=4.5)", "typeguard"] -[[package]] -name = "tiktoken" -version = "0.7.0" -description = "tiktoken is a fast BPE tokeniser for use with OpenAI's models" -optional = false -python-versions = ">=3.8" -files = [ - {file = "tiktoken-0.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485f3cc6aba7c6b6ce388ba634fbba656d9ee27f766216f45146beb4ac18b25f"}, - {file = "tiktoken-0.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e54be9a2cd2f6d6ffa3517b064983fb695c9a9d8aa7d574d1ef3c3f931a99225"}, - {file = "tiktoken-0.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:79383a6e2c654c6040e5f8506f3750db9ddd71b550c724e673203b4f6b4b4590"}, - {file = "tiktoken-0.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d4511c52caacf3c4981d1ae2df85908bd31853f33d30b345c8b6830763f769c"}, - {file = "tiktoken-0.7.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:13c94efacdd3de9aff824a788353aa5749c0faee1fbe3816df365ea450b82311"}, - {file = "tiktoken-0.7.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8e58c7eb29d2ab35a7a8929cbeea60216a4ccdf42efa8974d8e176d50c9a3df5"}, - {file = "tiktoken-0.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:21a20c3bd1dd3e55b91c1331bf25f4af522c525e771691adbc9a69336fa7f702"}, - {file = "tiktoken-0.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:10c7674f81e6e350fcbed7c09a65bca9356eaab27fb2dac65a1e440f2bcfe30f"}, - {file = "tiktoken-0.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:084cec29713bc9d4189a937f8a35dbdfa785bd1235a34c1124fe2323821ee93f"}, - {file = "tiktoken-0.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:811229fde1652fedcca7c6dfe76724d0908775b353556d8a71ed74d866f73f7b"}, - {file = "tiktoken-0.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86b6e7dc2e7ad1b3757e8a24597415bafcfb454cebf9a33a01f2e6ba2e663992"}, - {file = "tiktoken-0.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1063c5748be36344c7e18c7913c53e2cca116764c2080177e57d62c7ad4576d1"}, - {file = "tiktoken-0.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:20295d21419bfcca092644f7e2f2138ff947a6eb8cfc732c09cc7d76988d4a89"}, - {file = "tiktoken-0.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:959d993749b083acc57a317cbc643fb85c014d055b2119b739487288f4e5d1cb"}, - {file = "tiktoken-0.7.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:71c55d066388c55a9c00f61d2c456a6086673ab7dec22dd739c23f77195b1908"}, - {file = "tiktoken-0.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:09ed925bccaa8043e34c519fbb2f99110bd07c6fd67714793c21ac298e449410"}, - {file = "tiktoken-0.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03c6c40ff1db0f48a7b4d2dafeae73a5607aacb472fa11f125e7baf9dce73704"}, - {file = "tiktoken-0.7.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d20b5c6af30e621b4aca094ee61777a44118f52d886dbe4f02b70dfe05c15350"}, - {file = "tiktoken-0.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d427614c3e074004efa2f2411e16c826f9df427d3c70a54725cae860f09e4bf4"}, - {file = "tiktoken-0.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8c46d7af7b8c6987fac9b9f61041b452afe92eb087d29c9ce54951280f899a97"}, - {file = "tiktoken-0.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:0bc603c30b9e371e7c4c7935aba02af5994a909fc3c0fe66e7004070858d3f8f"}, - {file = "tiktoken-0.7.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2398fecd38c921bcd68418675a6d155fad5f5e14c2e92fcf5fe566fa5485a858"}, - {file = "tiktoken-0.7.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8f5f6afb52fb8a7ea1c811e435e4188f2bef81b5e0f7a8635cc79b0eef0193d6"}, - {file = "tiktoken-0.7.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:861f9ee616766d736be4147abac500732b505bf7013cfaf019b85892637f235e"}, - {file = "tiktoken-0.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54031f95c6939f6b78122c0aa03a93273a96365103793a22e1793ee86da31685"}, - {file = "tiktoken-0.7.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = 
"sha256:fffdcb319b614cf14f04d02a52e26b1d1ae14a570f90e9b55461a72672f7b13d"}, - {file = "tiktoken-0.7.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c72baaeaefa03ff9ba9688624143c858d1f6b755bb85d456d59e529e17234769"}, - {file = "tiktoken-0.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:131b8aeb043a8f112aad9f46011dced25d62629091e51d9dc1adbf4a1cc6aa98"}, - {file = "tiktoken-0.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cabc6dc77460df44ec5b879e68692c63551ae4fae7460dd4ff17181df75f1db7"}, - {file = "tiktoken-0.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8d57f29171255f74c0aeacd0651e29aa47dff6f070cb9f35ebc14c82278f3b25"}, - {file = "tiktoken-0.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ee92776fdbb3efa02a83f968c19d4997a55c8e9ce7be821ceee04a1d1ee149c"}, - {file = "tiktoken-0.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e215292e99cb41fbc96988ef62ea63bb0ce1e15f2c147a61acc319f8b4cbe5bf"}, - {file = "tiktoken-0.7.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8a81bac94769cab437dd3ab0b8a4bc4e0f9cf6835bcaa88de71f39af1791727a"}, - {file = "tiktoken-0.7.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d6d73ea93e91d5ca771256dfc9d1d29f5a554b83821a1dc0891987636e0ae226"}, - {file = "tiktoken-0.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:2bcb28ddf79ffa424f171dfeef9a4daff61a94c631ca6813f43967cb263b83b9"}, - {file = "tiktoken-0.7.0.tar.gz", hash = "sha256:1077266e949c24e0291f6c350433c6f0971365ece2b173a23bc3b9f9defef6b6"}, -] - -[package.dependencies] -regex = ">=2022.1.18" -requests = ">=2.26.0" - -[package.extras] -blobfile = ["blobfile (>=2)"] - [[package]] name = "tokenizers" -version = "0.19.1" +version = "0.20.0" description = "" optional = false python-versions = ">=3.7" files = [ - {file = "tokenizers-0.19.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:952078130b3d101e05ecfc7fc3640282d74ed26bcf691400f872563fca15ac97"}, - {file = "tokenizers-0.19.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:82c8b8063de6c0468f08e82c4e198763e7b97aabfe573fd4cf7b33930ca4df77"}, - {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f03727225feaf340ceeb7e00604825addef622d551cbd46b7b775ac834c1e1c4"}, - {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:453e4422efdfc9c6b6bf2eae00d5e323f263fff62b29a8c9cd526c5003f3f642"}, - {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:02e81bf089ebf0e7f4df34fa0207519f07e66d8491d963618252f2e0729e0b46"}, - {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b07c538ba956843833fee1190cf769c60dc62e1cf934ed50d77d5502194d63b1"}, - {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e28cab1582e0eec38b1f38c1c1fb2e56bce5dc180acb1724574fc5f47da2a4fe"}, - {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b01afb7193d47439f091cd8f070a1ced347ad0f9144952a30a41836902fe09e"}, - {file = "tokenizers-0.19.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7fb297edec6c6841ab2e4e8f357209519188e4a59b557ea4fafcf4691d1b4c98"}, - {file = "tokenizers-0.19.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2e8a3dd055e515df7054378dc9d6fa8c8c34e1f32777fb9a01fea81496b3f9d3"}, - {file = "tokenizers-0.19.1-cp310-none-win32.whl", hash = 
"sha256:7ff898780a155ea053f5d934925f3902be2ed1f4d916461e1a93019cc7250837"}, - {file = "tokenizers-0.19.1-cp310-none-win_amd64.whl", hash = "sha256:bea6f9947e9419c2fda21ae6c32871e3d398cba549b93f4a65a2d369662d9403"}, - {file = "tokenizers-0.19.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:5c88d1481f1882c2e53e6bb06491e474e420d9ac7bdff172610c4f9ad3898059"}, - {file = "tokenizers-0.19.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ddf672ed719b4ed82b51499100f5417d7d9f6fb05a65e232249268f35de5ed14"}, - {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:dadc509cc8a9fe460bd274c0e16ac4184d0958117cf026e0ea8b32b438171594"}, - {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfedf31824ca4915b511b03441784ff640378191918264268e6923da48104acc"}, - {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac11016d0a04aa6487b1513a3a36e7bee7eec0e5d30057c9c0408067345c48d2"}, - {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:76951121890fea8330d3a0df9a954b3f2a37e3ec20e5b0530e9a0044ca2e11fe"}, - {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b342d2ce8fc8d00f376af068e3274e2e8649562e3bc6ae4a67784ded6b99428d"}, - {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d16ff18907f4909dca9b076b9c2d899114dd6abceeb074eca0c93e2353f943aa"}, - {file = "tokenizers-0.19.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:706a37cc5332f85f26efbe2bdc9ef8a9b372b77e4645331a405073e4b3a8c1c6"}, - {file = "tokenizers-0.19.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:16baac68651701364b0289979ecec728546133e8e8fe38f66fe48ad07996b88b"}, - {file = "tokenizers-0.19.1-cp311-none-win32.whl", hash = "sha256:9ed240c56b4403e22b9584ee37d87b8bfa14865134e3e1c3fb4b2c42fafd3256"}, - {file = "tokenizers-0.19.1-cp311-none-win_amd64.whl", hash = "sha256:ad57d59341710b94a7d9dbea13f5c1e7d76fd8d9bcd944a7a6ab0b0da6e0cc66"}, - {file = "tokenizers-0.19.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:621d670e1b1c281a1c9698ed89451395d318802ff88d1fc1accff0867a06f153"}, - {file = "tokenizers-0.19.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d924204a3dbe50b75630bd16f821ebda6a5f729928df30f582fb5aade90c818a"}, - {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4f3fefdc0446b1a1e6d81cd4c07088ac015665d2e812f6dbba4a06267d1a2c95"}, - {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9620b78e0b2d52ef07b0d428323fb34e8ea1219c5eac98c2596311f20f1f9266"}, - {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04ce49e82d100594715ac1b2ce87d1a36e61891a91de774755f743babcd0dd52"}, - {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5c2ff13d157afe413bf7e25789879dd463e5a4abfb529a2d8f8473d8042e28f"}, - {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3174c76efd9d08f836bfccaca7cfec3f4d1c0a4cf3acbc7236ad577cc423c840"}, - {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c9d5b6c0e7a1e979bec10ff960fae925e947aab95619a6fdb4c1d8ff3708ce3"}, - {file = "tokenizers-0.19.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:a179856d1caee06577220ebcfa332af046d576fb73454b8f4d4b0ba8324423ea"}, - {file = "tokenizers-0.19.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:952b80dac1a6492170f8c2429bd11fcaa14377e097d12a1dbe0ef2fb2241e16c"}, - {file = "tokenizers-0.19.1-cp312-none-win32.whl", hash = "sha256:01d62812454c188306755c94755465505836fd616f75067abcae529c35edeb57"}, - {file = "tokenizers-0.19.1-cp312-none-win_amd64.whl", hash = "sha256:b70bfbe3a82d3e3fb2a5e9b22a39f8d1740c96c68b6ace0086b39074f08ab89a"}, - {file = "tokenizers-0.19.1-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:bb9dfe7dae85bc6119d705a76dc068c062b8b575abe3595e3c6276480e67e3f1"}, - {file = "tokenizers-0.19.1-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:1f0360cbea28ea99944ac089c00de7b2e3e1c58f479fb8613b6d8d511ce98267"}, - {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:71e3ec71f0e78780851fef28c2a9babe20270404c921b756d7c532d280349214"}, - {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b82931fa619dbad979c0ee8e54dd5278acc418209cc897e42fac041f5366d626"}, - {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e8ff5b90eabdcdaa19af697885f70fe0b714ce16709cf43d4952f1f85299e73a"}, - {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e742d76ad84acbdb1a8e4694f915fe59ff6edc381c97d6dfdd054954e3478ad4"}, - {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d8c5d59d7b59885eab559d5bc082b2985555a54cda04dda4c65528d90ad252ad"}, - {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b2da5c32ed869bebd990c9420df49813709e953674c0722ff471a116d97b22d"}, - {file = "tokenizers-0.19.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:638e43936cc8b2cbb9f9d8dde0fe5e7e30766a3318d2342999ae27f68fdc9bd6"}, - {file = "tokenizers-0.19.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:78e769eb3b2c79687d9cb0f89ef77223e8e279b75c0a968e637ca7043a84463f"}, - {file = "tokenizers-0.19.1-cp37-none-win32.whl", hash = "sha256:72791f9bb1ca78e3ae525d4782e85272c63faaef9940d92142aa3eb79f3407a3"}, - {file = "tokenizers-0.19.1-cp37-none-win_amd64.whl", hash = "sha256:f3bbb7a0c5fcb692950b041ae11067ac54826204318922da754f908d95619fbc"}, - {file = "tokenizers-0.19.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:07f9295349bbbcedae8cefdbcfa7f686aa420be8aca5d4f7d1ae6016c128c0c5"}, - {file = "tokenizers-0.19.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:10a707cc6c4b6b183ec5dbfc5c34f3064e18cf62b4a938cb41699e33a99e03c1"}, - {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6309271f57b397aa0aff0cbbe632ca9d70430839ca3178bf0f06f825924eca22"}, - {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ad23d37d68cf00d54af184586d79b84075ada495e7c5c0f601f051b162112dc"}, - {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:427c4f0f3df9109314d4f75b8d1f65d9477033e67ffaec4bca53293d3aca286d"}, - {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e83a31c9cf181a0a3ef0abad2b5f6b43399faf5da7e696196ddd110d332519ee"}, - {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c27b99889bd58b7e301468c0838c5ed75e60c66df0d4db80c08f43462f82e0d3"}, - {file 
= "tokenizers-0.19.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bac0b0eb952412b0b196ca7a40e7dce4ed6f6926489313414010f2e6b9ec2adf"}, - {file = "tokenizers-0.19.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8a6298bde623725ca31c9035a04bf2ef63208d266acd2bed8c2cb7d2b7d53ce6"}, - {file = "tokenizers-0.19.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:08a44864e42fa6d7d76d7be4bec62c9982f6f6248b4aa42f7302aa01e0abfd26"}, - {file = "tokenizers-0.19.1-cp38-none-win32.whl", hash = "sha256:1de5bc8652252d9357a666e609cb1453d4f8e160eb1fb2830ee369dd658e8975"}, - {file = "tokenizers-0.19.1-cp38-none-win_amd64.whl", hash = "sha256:0bcce02bf1ad9882345b34d5bd25ed4949a480cf0e656bbd468f4d8986f7a3f1"}, - {file = "tokenizers-0.19.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:0b9394bd204842a2a1fd37fe29935353742be4a3460b6ccbaefa93f58a8df43d"}, - {file = "tokenizers-0.19.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4692ab92f91b87769d950ca14dbb61f8a9ef36a62f94bad6c82cc84a51f76f6a"}, - {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6258c2ef6f06259f70a682491c78561d492e885adeaf9f64f5389f78aa49a051"}, - {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c85cf76561fbd01e0d9ea2d1cbe711a65400092bc52b5242b16cfd22e51f0c58"}, - {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:670b802d4d82bbbb832ddb0d41df7015b3e549714c0e77f9bed3e74d42400fbe"}, - {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:85aa3ab4b03d5e99fdd31660872249df5e855334b6c333e0bc13032ff4469c4a"}, - {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cbf001afbbed111a79ca47d75941e9e5361297a87d186cbfc11ed45e30b5daba"}, - {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4c89aa46c269e4e70c4d4f9d6bc644fcc39bb409cb2a81227923404dd6f5227"}, - {file = "tokenizers-0.19.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:39c1ec76ea1027438fafe16ecb0fb84795e62e9d643444c1090179e63808c69d"}, - {file = "tokenizers-0.19.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c2a0d47a89b48d7daa241e004e71fb5a50533718897a4cd6235cb846d511a478"}, - {file = "tokenizers-0.19.1-cp39-none-win32.whl", hash = "sha256:61b7fe8886f2e104d4caf9218b157b106207e0f2a4905c9c7ac98890688aabeb"}, - {file = "tokenizers-0.19.1-cp39-none-win_amd64.whl", hash = "sha256:f97660f6c43efd3e0bfd3f2e3e5615bf215680bad6ee3d469df6454b8c6e8256"}, - {file = "tokenizers-0.19.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3b11853f17b54c2fe47742c56d8a33bf49ce31caf531e87ac0d7d13d327c9334"}, - {file = "tokenizers-0.19.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d26194ef6c13302f446d39972aaa36a1dda6450bc8949f5eb4c27f51191375bd"}, - {file = "tokenizers-0.19.1-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e8d1ed93beda54bbd6131a2cb363a576eac746d5c26ba5b7556bc6f964425594"}, - {file = "tokenizers-0.19.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca407133536f19bdec44b3da117ef0d12e43f6d4b56ac4c765f37eca501c7bda"}, - {file = "tokenizers-0.19.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce05fde79d2bc2e46ac08aacbc142bead21614d937aac950be88dc79f9db9022"}, - {file = "tokenizers-0.19.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash 
= "sha256:35583cd46d16f07c054efd18b5d46af4a2f070a2dd0a47914e66f3ff5efb2b1e"}, - {file = "tokenizers-0.19.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:43350270bfc16b06ad3f6f07eab21f089adb835544417afda0f83256a8bf8b75"}, - {file = "tokenizers-0.19.1-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b4399b59d1af5645bcee2072a463318114c39b8547437a7c2d6a186a1b5a0e2d"}, - {file = "tokenizers-0.19.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6852c5b2a853b8b0ddc5993cd4f33bfffdca4fcc5d52f89dd4b8eada99379285"}, - {file = "tokenizers-0.19.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bcd266ae85c3d39df2f7e7d0e07f6c41a55e9a3123bb11f854412952deacd828"}, - {file = "tokenizers-0.19.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ecb2651956eea2aa0a2d099434134b1b68f1c31f9a5084d6d53f08ed43d45ff2"}, - {file = "tokenizers-0.19.1-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:b279ab506ec4445166ac476fb4d3cc383accde1ea152998509a94d82547c8e2a"}, - {file = "tokenizers-0.19.1-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:89183e55fb86e61d848ff83753f64cded119f5d6e1f553d14ffee3700d0a4a49"}, - {file = "tokenizers-0.19.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2edbc75744235eea94d595a8b70fe279dd42f3296f76d5a86dde1d46e35f574"}, - {file = "tokenizers-0.19.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:0e64bfde9a723274e9a71630c3e9494ed7b4c0f76a1faacf7fe294cd26f7ae7c"}, - {file = "tokenizers-0.19.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0b5ca92bfa717759c052e345770792d02d1f43b06f9e790ca0a1db62838816f3"}, - {file = "tokenizers-0.19.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f8a20266e695ec9d7a946a019c1d5ca4eddb6613d4f466888eee04f16eedb85"}, - {file = "tokenizers-0.19.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63c38f45d8f2a2ec0f3a20073cccb335b9f99f73b3c69483cd52ebc75369d8a1"}, - {file = "tokenizers-0.19.1-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dd26e3afe8a7b61422df3176e06664503d3f5973b94f45d5c45987e1cb711876"}, - {file = "tokenizers-0.19.1-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:eddd5783a4a6309ce23432353cdb36220e25cbb779bfa9122320666508b44b88"}, - {file = "tokenizers-0.19.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:56ae39d4036b753994476a1b935584071093b55c7a72e3b8288e68c313ca26e7"}, - {file = "tokenizers-0.19.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f9939ca7e58c2758c01b40324a59c034ce0cebad18e0d4563a9b1beab3018243"}, - {file = "tokenizers-0.19.1-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6c330c0eb815d212893c67a032e9dc1b38a803eccb32f3e8172c19cc69fbb439"}, - {file = "tokenizers-0.19.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec11802450a2487cdf0e634b750a04cbdc1c4d066b97d94ce7dd2cb51ebb325b"}, - {file = "tokenizers-0.19.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2b718f316b596f36e1dae097a7d5b91fc5b85e90bf08b01ff139bd8953b25af"}, - {file = "tokenizers-0.19.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:ed69af290c2b65169f0ba9034d1dc39a5db9459b32f1dd8b5f3f32a3fcf06eab"}, - {file = "tokenizers-0.19.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f8a9c828277133af13f3859d1b6bf1c3cb6e9e1637df0e45312e6b7c2e622b1f"}, - {file = 
"tokenizers-0.19.1.tar.gz", hash = "sha256:ee59e6680ed0fdbe6b724cf38bd70400a0c1dd623b07ac729087270caeac88e3"}, + {file = "tokenizers-0.20.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:6cff5c5e37c41bc5faa519d6f3df0679e4b37da54ea1f42121719c5e2b4905c0"}, + {file = "tokenizers-0.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:62a56bf75c27443432456f4ca5ca055befa95e25be8a28141cc495cac8ae4d6d"}, + {file = "tokenizers-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68cc7de6a63f09c4a86909c2597b995aa66e19df852a23aea894929c74369929"}, + {file = "tokenizers-0.20.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:053c37ecee482cc958fdee53af3c6534286a86f5d35aac476f7c246830e53ae5"}, + {file = "tokenizers-0.20.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3d7074aaabc151a6363fa03db5493fc95b423b2a1874456783989e96d541c7b6"}, + {file = "tokenizers-0.20.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a11435780f2acd89e8fefe5e81cecf01776f6edb9b3ac95bcb76baee76b30b90"}, + {file = "tokenizers-0.20.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9a81cd2712973b007d84268d45fc3f6f90a79c31dfe7f1925e6732f8d2959987"}, + {file = "tokenizers-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7dfd796ab9d909f76fb93080e1c7c8309f196ecb316eb130718cd5e34231c69"}, + {file = "tokenizers-0.20.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8029ad2aa8cb00605c9374566034c1cc1b15130713e0eb5afcef6cface8255c9"}, + {file = "tokenizers-0.20.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ca4d54260ebe97d59dfa9a30baa20d0c4dd9137d99a8801700055c561145c24e"}, + {file = "tokenizers-0.20.0-cp310-none-win32.whl", hash = "sha256:95ee16b57cec11b86a7940174ec5197d506439b0f415ab3859f254b1dffe9df0"}, + {file = "tokenizers-0.20.0-cp310-none-win_amd64.whl", hash = "sha256:0a61a11e93eeadbf02aea082ffc75241c4198e0608bbbac4f65a9026851dcf37"}, + {file = "tokenizers-0.20.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6636b798b3c4d6c9b1af1a918bd07c867808e5a21c64324e95318a237e6366c3"}, + {file = "tokenizers-0.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ec603e42eaf499ffd58b9258162add948717cf21372458132f14e13a6bc7172"}, + {file = "tokenizers-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cce124264903a8ea6f8f48e1cc7669e5ef638c18bd4ab0a88769d5f92debdf7f"}, + {file = "tokenizers-0.20.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:07bbeba0231cf8de07aa6b9e33e9779ff103d47042eeeb859a8c432e3292fb98"}, + {file = "tokenizers-0.20.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:06c0ca8397b35d38b83a44a9c6929790c1692957d88541df061cb34d82ebbf08"}, + {file = "tokenizers-0.20.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ca6557ac3b83d912dfbb1f70ab56bd4b0594043916688e906ede09f42e192401"}, + {file = "tokenizers-0.20.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a5ad94c9e80ac6098328bee2e3264dbced4c6faa34429994d473f795ec58ef4"}, + {file = "tokenizers-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b5c7f906ee6bec30a9dc20268a8b80f3b9584de1c9f051671cb057dc6ce28f6"}, + {file = "tokenizers-0.20.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:31e087e9ee1b8f075b002bfee257e858dc695f955b43903e1bb4aa9f170e37fe"}, + {file = 
"tokenizers-0.20.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c3124fb6f3346cb3d8d775375d3b429bf4dcfc24f739822702009d20a4297990"}, + {file = "tokenizers-0.20.0-cp311-none-win32.whl", hash = "sha256:a4bb8b40ba9eefa621fdcabf04a74aa6038ae3be0c614c6458bd91a4697a452f"}, + {file = "tokenizers-0.20.0-cp311-none-win_amd64.whl", hash = "sha256:2b709d371f1fe60a28ef0c5c67815952d455ca7f34dbe7197eaaed3cc54b658e"}, + {file = "tokenizers-0.20.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:15c81a17d0d66f4987c6ca16f4bea7ec253b8c7ed1bb00fdc5d038b1bb56e714"}, + {file = "tokenizers-0.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6a531cdf1fb6dc41c984c785a3b299cb0586de0b35683842a3afbb1e5207f910"}, + {file = "tokenizers-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06caabeb4587f8404e0cd9d40f458e9cba3e815c8155a38e579a74ff3e2a4301"}, + {file = "tokenizers-0.20.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8768f964f23f5b9f50546c0369c75ab3262de926983888bbe8b98be05392a79c"}, + {file = "tokenizers-0.20.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:626403860152c816f97b649fd279bd622c3d417678c93b4b1a8909b6380b69a8"}, + {file = "tokenizers-0.20.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c1b88fa9e5ff062326f4bf82681da5a96fca7104d921a6bd7b1e6fcf224af26"}, + {file = "tokenizers-0.20.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d7e559436a07dc547f22ce1101f26d8b2fad387e28ec8e7e1e3b11695d681d8"}, + {file = "tokenizers-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e48afb75e50449848964e4a67b0da01261dd3aa8df8daecf10db8fd7f5b076eb"}, + {file = "tokenizers-0.20.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:baf5d0e1ff44710a95eefc196dd87666ffc609fd447c5e5b68272a7c3d342a1d"}, + {file = "tokenizers-0.20.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e5e56df0e8ed23ba60ae3848c3f069a0710c4b197218fe4f89e27eba38510768"}, + {file = "tokenizers-0.20.0-cp312-none-win32.whl", hash = "sha256:ec53e5ecc142a82432f9c6c677dbbe5a2bfee92b8abf409a9ecb0d425ee0ce75"}, + {file = "tokenizers-0.20.0-cp312-none-win_amd64.whl", hash = "sha256:f18661ece72e39c0dfaa174d6223248a15b457dbd4b0fc07809b8e6d3ca1a234"}, + {file = "tokenizers-0.20.0-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:f7065b1084d8d1a03dc89d9aad69bcbc8415d4bc123c367063eb32958cd85054"}, + {file = "tokenizers-0.20.0-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:e5d4069e4714e3f7ba0a4d3d44f9d84a432cd4e4aa85c3d7dd1f51440f12e4a1"}, + {file = "tokenizers-0.20.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:799b808529e54b7e1a36350bda2aeb470e8390e484d3e98c10395cee61d4e3c6"}, + {file = "tokenizers-0.20.0-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7f9baa027cc8a281ad5f7725a93c204d7a46986f88edbe8ef7357f40a23fb9c7"}, + {file = "tokenizers-0.20.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:010ec7f3f7a96adc4c2a34a3ada41fa14b4b936b5628b4ff7b33791258646c6b"}, + {file = "tokenizers-0.20.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98d88f06155335b14fd78e32ee28ca5b2eb30fced4614e06eb14ae5f7fba24ed"}, + {file = "tokenizers-0.20.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e13eb000ef540c2280758d1b9cfa5fe424b0424ae4458f440e6340a4f18b2638"}, + {file = 
"tokenizers-0.20.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fab3cf066ff426f7e6d70435dc28a9ff01b2747be83810e397cba106f39430b0"}, + {file = "tokenizers-0.20.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:39fa3761b30a89368f322e5daf4130dce8495b79ad831f370449cdacfb0c0d37"}, + {file = "tokenizers-0.20.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c8da0fba4d179ddf2607821575998df3c294aa59aa8df5a6646dc64bc7352bce"}, + {file = "tokenizers-0.20.0-cp37-none-win32.whl", hash = "sha256:fada996d6da8cf213f6e3c91c12297ad4f6cdf7a85c2fadcd05ec32fa6846fcd"}, + {file = "tokenizers-0.20.0-cp37-none-win_amd64.whl", hash = "sha256:7d29aad702279e0760c265fcae832e89349078e3418dd329732d4503259fd6bd"}, + {file = "tokenizers-0.20.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:099c68207f3ef0227ecb6f80ab98ea74de559f7b124adc7b17778af0250ee90a"}, + {file = "tokenizers-0.20.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:68012d8a8cddb2eab3880870d7e2086cb359c7f7a2b03f5795044f5abff4e850"}, + {file = "tokenizers-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9253bdd209c6aee168deca7d0e780581bf303e0058f268f9bb06859379de19b6"}, + {file = "tokenizers-0.20.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8f868600ddbcb0545905ed075eb7218a0756bf6c09dae7528ea2f8436ebd2c93"}, + {file = "tokenizers-0.20.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a9643d9c8c5f99b6aba43fd10034f77cc6c22c31f496d2f0ee183047d948fa0"}, + {file = "tokenizers-0.20.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c375c6a889aeab44734028bc65cc070acf93ccb0f9368be42b67a98e1063d3f6"}, + {file = "tokenizers-0.20.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e359f852328e254f070bbd09a19a568421d23388f04aad9f2fb7da7704c7228d"}, + {file = "tokenizers-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d98b01a309d4387f3b1c1dd68a8b8136af50376cf146c1b7e8d8ead217a5be4b"}, + {file = "tokenizers-0.20.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:459f7537119554c2899067dec1ac74a00d02beef6558f4ee2e99513bf6d568af"}, + {file = "tokenizers-0.20.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:392b87ec89452628c045c9f2a88bc2a827f4c79e7d84bc3b72752b74c2581f70"}, + {file = "tokenizers-0.20.0-cp38-none-win32.whl", hash = "sha256:55a393f893d2ed4dd95a1553c2e42d4d4086878266f437b03590d3f81984c4fe"}, + {file = "tokenizers-0.20.0-cp38-none-win_amd64.whl", hash = "sha256:30ffe33c5c2f2aab8e9a3340d0110dd9f7ace7eec7362e20a697802306bd8068"}, + {file = "tokenizers-0.20.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:aa2d4a6fed2a7e3f860c7fc9d48764bb30f2649d83915d66150d6340e06742b8"}, + {file = "tokenizers-0.20.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b5ef0f814084a897e9071fc4a868595f018c5c92889197bdc4bf19018769b148"}, + {file = "tokenizers-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc1e1b791e8c3bf4c4f265f180dadaff1c957bf27129e16fdd5e5d43c2d3762c"}, + {file = "tokenizers-0.20.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b69e55e481459c07885263743a0d3c18d52db19bae8226a19bcca4aaa213fff"}, + {file = "tokenizers-0.20.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4806b4d82e27a2512bc23057b2986bc8b85824914286975b84d8105ff40d03d9"}, + {file = "tokenizers-0.20.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:9859e9ef13adf5a473ccab39d31bff9c550606ae3c784bf772b40f615742a24f"}, + {file = "tokenizers-0.20.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef703efedf4c20488a8eb17637b55973745b27997ff87bad88ed499b397d1144"}, + {file = "tokenizers-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6eec0061bab94b1841ab87d10831fdf1b48ebaed60e6d66d66dbe1d873f92bf5"}, + {file = "tokenizers-0.20.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:980f3d0d7e73f845b69087f29a63c11c7eb924c4ad6b358da60f3db4cf24bdb4"}, + {file = "tokenizers-0.20.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7c157550a2f3851b29d7fdc9dc059fcf81ff0c0fc49a1e5173a89d533ed043fa"}, + {file = "tokenizers-0.20.0-cp39-none-win32.whl", hash = "sha256:8a3d2f4d08608ec4f9895ec25b4b36a97f05812543190a5f2c3cd19e8f041e5a"}, + {file = "tokenizers-0.20.0-cp39-none-win_amd64.whl", hash = "sha256:d90188d12afd0c75e537f9a1d92f9c7375650188ee4f48fdc76f9e38afbd2251"}, + {file = "tokenizers-0.20.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d68e15f1815357b059ec266062340c343ea7f98f7f330602df81ffa3474b6122"}, + {file = "tokenizers-0.20.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:23f9ecec637b9bc80da5f703808d29ed5329e56b5aa8d791d1088014f48afadc"}, + {file = "tokenizers-0.20.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f830b318ee599e3d0665b3e325f85bc75ee2d2ca6285f52e439dc22b64691580"}, + {file = "tokenizers-0.20.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3dc750def789cb1de1b5a37657919545e1d9ffa667658b3fa9cb7862407a1b8"}, + {file = "tokenizers-0.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e26e6c755ae884c2ea6135cd215bdd0fccafe4ee62405014b8c3cd19954e3ab9"}, + {file = "tokenizers-0.20.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:a1158c7174f427182e08baa2a8ded2940f2b4a3e94969a85cc9cfd16004cbcea"}, + {file = "tokenizers-0.20.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:6324826287a3fc198898d3dcf758fe4a8479e42d6039f4c59e2cedd3cf92f64e"}, + {file = "tokenizers-0.20.0-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7d8653149405bb0c16feaf9cfee327fdb6aaef9dc2998349fec686f35e81c4e2"}, + {file = "tokenizers-0.20.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8a2dc1e402a155e97309287ca085c80eb1b7fab8ae91527d3b729181639fa51"}, + {file = "tokenizers-0.20.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07bef67b20aa6e5f7868c42c7c5eae4d24f856274a464ae62e47a0f2cccec3da"}, + {file = "tokenizers-0.20.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da06e397182ff53789c506c7833220c192952c57e1581a53f503d8d953e2d67e"}, + {file = "tokenizers-0.20.0-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:302f7e11a14814028b7fc88c45a41f1bbe9b5b35fd76d6869558d1d1809baa43"}, + {file = "tokenizers-0.20.0-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:055ec46e807b875589dfbe3d9259f9a6ee43394fb553b03b3d1e9541662dbf25"}, + {file = "tokenizers-0.20.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e3144b8acebfa6ae062e8f45f7ed52e4b50fb6c62f93afc8871b525ab9fdcab3"}, + {file = "tokenizers-0.20.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:b52aa3fd14b2a07588c00a19f66511cff5cca8f7266ca3edcdd17f3512ad159f"}, + {file = 
"tokenizers-0.20.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b8cf52779ffc5d4d63a0170fbeb512372bad0dd014ce92bbb9149756c831124"}, + {file = "tokenizers-0.20.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:983a45dd11a876124378dae71d6d9761822199b68a4c73f32873d8cdaf326a5b"}, + {file = "tokenizers-0.20.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df6b819c9a19831ebec581e71a7686a54ab45d90faf3842269a10c11d746de0c"}, + {file = "tokenizers-0.20.0-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e738cfd80795fcafcef89c5731c84b05638a4ab3f412f97d5ed7765466576eb1"}, + {file = "tokenizers-0.20.0-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:c8842c7be2fadb9c9edcee233b1b7fe7ade406c99b0973f07439985c1c1d0683"}, + {file = "tokenizers-0.20.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e47a82355511c373a4a430c4909dc1e518e00031207b1fec536c49127388886b"}, + {file = "tokenizers-0.20.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:9afbf359004551179a5db19424180c81276682773cff2c5d002f6eaaffe17230"}, + {file = "tokenizers-0.20.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a07eaa8799a92e6af6f472c21a75bf71575de2af3c0284120b7a09297c0de2f3"}, + {file = "tokenizers-0.20.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0994b2e5fc53a301071806bc4303e4bc3bdc3f490e92a21338146a36746b0872"}, + {file = "tokenizers-0.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b6466e0355b603d10e3cc3d282d350b646341b601e50969464a54939f9848d0"}, + {file = "tokenizers-0.20.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:1e86594c2a433cb1ea09cfbe596454448c566e57ee8905bd557e489d93e89986"}, + {file = "tokenizers-0.20.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:3e14cdef1efa96ecead6ea64a891828432c3ebba128bdc0596e3059fea104ef3"}, + {file = "tokenizers-0.20.0.tar.gz", hash = "sha256:39d7acc43f564c274085cafcd1dae9d36f332456de1a31970296a6b8da4eac8d"}, ] [package.dependencies] @@ -3198,13 +2295,13 @@ files = [ [[package]] name = "tqdm" -version = "4.66.4" +version = "4.66.5" description = "Fast, Extensible Progress Meter" optional = false python-versions = ">=3.7" files = [ - {file = "tqdm-4.66.4-py3-none-any.whl", hash = "sha256:b75ca56b413b030bc3f00af51fd2c1a1a5eac6a0c1cca83cbb37a5c52abce644"}, - {file = "tqdm-4.66.4.tar.gz", hash = "sha256:e4d936c9de8727928f3be6079590e97d9abfe8d39a590be678eb5919ffc186bb"}, + {file = "tqdm-4.66.5-py3-none-any.whl", hash = "sha256:90279a3770753eafc9194a0364852159802111925aa30eb3f9d85b0e805ac7cd"}, + {file = "tqdm-4.66.5.tar.gz", hash = "sha256:e1020aef2e5096702d8a025ac7d16b1577279c9d63f8375b63083e9a5f0fcbad"}, ] [package.dependencies] @@ -3218,13 +2315,13 @@ telegram = ["requests"] [[package]] name = "typer" -version = "0.12.3" +version = "0.12.4" description = "Typer, build great CLIs. Easy to code. Based on Python type hints." 
optional = false python-versions = ">=3.7" files = [ - {file = "typer-0.12.3-py3-none-any.whl", hash = "sha256:070d7ca53f785acbccba8e7d28b08dcd88f79f1fbda035ade0aecec71ca5c914"}, - {file = "typer-0.12.3.tar.gz", hash = "sha256:49e73131481d804288ef62598d97a1ceef3058905aa536a1134f90891ba35482"}, + {file = "typer-0.12.4-py3-none-any.whl", hash = "sha256:819aa03699f438397e876aa12b0d63766864ecba1b579092cc9fe35d886e34b6"}, + {file = "typer-0.12.4.tar.gz", hash = "sha256:c9c1613ed6a166162705b3347b8d10b661ccc5d95692654d0fb628118f2c34e6"}, ] [package.dependencies] @@ -3235,13 +2332,13 @@ typing-extensions = ">=3.7.4.3" [[package]] name = "types-requests" -version = "2.31.0.20240406" +version = "2.32.0.20240712" description = "Typing stubs for requests" optional = false python-versions = ">=3.8" files = [ - {file = "types-requests-2.31.0.20240406.tar.gz", hash = "sha256:4428df33c5503945c74b3f42e82b181e86ec7b724620419a2966e2de604ce1a1"}, - {file = "types_requests-2.31.0.20240406-py3-none-any.whl", hash = "sha256:6216cdac377c6b9a040ac1c0404f7284bd13199c0e1bb235f4324627e8898cf5"}, + {file = "types-requests-2.32.0.20240712.tar.gz", hash = "sha256:90c079ff05e549f6bf50e02e910210b98b8ff1ebdd18e19c873cd237737c1358"}, + {file = "types_requests-2.32.0.20240712-py3-none-any.whl", hash = "sha256:f754283e152c752e46e70942fa2a146b5bc70393522257bb85bd1ef7e019dcc3"}, ] [package.dependencies] @@ -3249,126 +2346,24 @@ urllib3 = ">=2" [[package]] name = "typing-extensions" -version = "4.11.0" +version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, - {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, -] - -[[package]] -name = "typing-inspect" -version = "0.9.0" -description = "Runtime inspection utilities for typing module." 
-optional = false -python-versions = "*" -files = [ - {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, - {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, -] - -[package.dependencies] -mypy-extensions = ">=0.3.0" -typing-extensions = ">=3.7.4" - -[[package]] -name = "ujson" -version = "5.10.0" -description = "Ultra fast JSON encoder and decoder for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "ujson-5.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2601aa9ecdbee1118a1c2065323bda35e2c5a2cf0797ef4522d485f9d3ef65bd"}, - {file = "ujson-5.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:348898dd702fc1c4f1051bc3aacbf894caa0927fe2c53e68679c073375f732cf"}, - {file = "ujson-5.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22cffecf73391e8abd65ef5f4e4dd523162a3399d5e84faa6aebbf9583df86d6"}, - {file = "ujson-5.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26b0e2d2366543c1bb4fbd457446f00b0187a2bddf93148ac2da07a53fe51569"}, - {file = "ujson-5.10.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:caf270c6dba1be7a41125cd1e4fc7ba384bf564650beef0df2dd21a00b7f5770"}, - {file = "ujson-5.10.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a245d59f2ffe750446292b0094244df163c3dc96b3ce152a2c837a44e7cda9d1"}, - {file = "ujson-5.10.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:94a87f6e151c5f483d7d54ceef83b45d3a9cca7a9cb453dbdbb3f5a6f64033f5"}, - {file = "ujson-5.10.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:29b443c4c0a113bcbb792c88bea67b675c7ca3ca80c3474784e08bba01c18d51"}, - {file = "ujson-5.10.0-cp310-cp310-win32.whl", hash = "sha256:c18610b9ccd2874950faf474692deee4223a994251bc0a083c114671b64e6518"}, - {file = "ujson-5.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:924f7318c31874d6bb44d9ee1900167ca32aa9b69389b98ecbde34c1698a250f"}, - {file = "ujson-5.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a5b366812c90e69d0f379a53648be10a5db38f9d4ad212b60af00bd4048d0f00"}, - {file = "ujson-5.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:502bf475781e8167f0f9d0e41cd32879d120a524b22358e7f205294224c71126"}, - {file = "ujson-5.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b91b5d0d9d283e085e821651184a647699430705b15bf274c7896f23fe9c9d8"}, - {file = "ujson-5.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:129e39af3a6d85b9c26d5577169c21d53821d8cf68e079060602e861c6e5da1b"}, - {file = "ujson-5.10.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f77b74475c462cb8b88680471193064d3e715c7c6074b1c8c412cb526466efe9"}, - {file = "ujson-5.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7ec0ca8c415e81aa4123501fee7f761abf4b7f386aad348501a26940beb1860f"}, - {file = "ujson-5.10.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ab13a2a9e0b2865a6c6db9271f4b46af1c7476bfd51af1f64585e919b7c07fd4"}, - {file = "ujson-5.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:57aaf98b92d72fc70886b5a0e1a1ca52c2320377360341715dd3933a18e827b1"}, - {file = "ujson-5.10.0-cp311-cp311-win32.whl", hash = "sha256:2987713a490ceb27edff77fb184ed09acdc565db700ee852823c3dc3cffe455f"}, - {file = "ujson-5.10.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:f00ea7e00447918ee0eff2422c4add4c5752b1b60e88fcb3c067d4a21049a720"}, - {file = "ujson-5.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:98ba15d8cbc481ce55695beee9f063189dce91a4b08bc1d03e7f0152cd4bbdd5"}, - {file = "ujson-5.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a9d2edbf1556e4f56e50fab7d8ff993dbad7f54bac68eacdd27a8f55f433578e"}, - {file = "ujson-5.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6627029ae4f52d0e1a2451768c2c37c0c814ffc04f796eb36244cf16b8e57043"}, - {file = "ujson-5.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8ccb77b3e40b151e20519c6ae6d89bfe3f4c14e8e210d910287f778368bb3d1"}, - {file = "ujson-5.10.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3caf9cd64abfeb11a3b661329085c5e167abbe15256b3b68cb5d914ba7396f3"}, - {file = "ujson-5.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6e32abdce572e3a8c3d02c886c704a38a1b015a1fb858004e03d20ca7cecbb21"}, - {file = "ujson-5.10.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a65b6af4d903103ee7b6f4f5b85f1bfd0c90ba4eeac6421aae436c9988aa64a2"}, - {file = "ujson-5.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:604a046d966457b6cdcacc5aa2ec5314f0e8c42bae52842c1e6fa02ea4bda42e"}, - {file = "ujson-5.10.0-cp312-cp312-win32.whl", hash = "sha256:6dea1c8b4fc921bf78a8ff00bbd2bfe166345f5536c510671bccececb187c80e"}, - {file = "ujson-5.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:38665e7d8290188b1e0d57d584eb8110951a9591363316dd41cf8686ab1d0abc"}, - {file = "ujson-5.10.0-cp313-cp313-macosx_10_9_x86_64.whl", hash = "sha256:618efd84dc1acbd6bff8eaa736bb6c074bfa8b8a98f55b61c38d4ca2c1f7f287"}, - {file = "ujson-5.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:38d5d36b4aedfe81dfe251f76c0467399d575d1395a1755de391e58985ab1c2e"}, - {file = "ujson-5.10.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67079b1f9fb29ed9a2914acf4ef6c02844b3153913eb735d4bf287ee1db6e557"}, - {file = "ujson-5.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7d0e0ceeb8fe2468c70ec0c37b439dd554e2aa539a8a56365fd761edb418988"}, - {file = "ujson-5.10.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:59e02cd37bc7c44d587a0ba45347cc815fb7a5fe48de16bf05caa5f7d0d2e816"}, - {file = "ujson-5.10.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2a890b706b64e0065f02577bf6d8ca3b66c11a5e81fb75d757233a38c07a1f20"}, - {file = "ujson-5.10.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:621e34b4632c740ecb491efc7f1fcb4f74b48ddb55e65221995e74e2d00bbff0"}, - {file = "ujson-5.10.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b9500e61fce0cfc86168b248104e954fead61f9be213087153d272e817ec7b4f"}, - {file = "ujson-5.10.0-cp313-cp313-win32.whl", hash = "sha256:4c4fc16f11ac1612f05b6f5781b384716719547e142cfd67b65d035bd85af165"}, - {file = "ujson-5.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:4573fd1695932d4f619928fd09d5d03d917274381649ade4328091ceca175539"}, - {file = "ujson-5.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a984a3131da7f07563057db1c3020b1350a3e27a8ec46ccbfbf21e5928a43050"}, - {file = "ujson-5.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:73814cd1b9db6fc3270e9d8fe3b19f9f89e78ee9d71e8bd6c9a626aeaeaf16bd"}, - {file = "ujson-5.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:61e1591ed9376e5eddda202ec229eddc56c612b61ac6ad07f96b91460bb6c2fb"}, - {file = "ujson-5.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2c75269f8205b2690db4572a4a36fe47cd1338e4368bc73a7a0e48789e2e35a"}, - {file = "ujson-5.10.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7223f41e5bf1f919cd8d073e35b229295aa8e0f7b5de07ed1c8fddac63a6bc5d"}, - {file = "ujson-5.10.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d4dc2fd6b3067c0782e7002ac3b38cf48608ee6366ff176bbd02cf969c9c20fe"}, - {file = "ujson-5.10.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:232cc85f8ee3c454c115455195a205074a56ff42608fd6b942aa4c378ac14dd7"}, - {file = "ujson-5.10.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:cc6139531f13148055d691e442e4bc6601f6dba1e6d521b1585d4788ab0bfad4"}, - {file = "ujson-5.10.0-cp38-cp38-win32.whl", hash = "sha256:e7ce306a42b6b93ca47ac4a3b96683ca554f6d35dd8adc5acfcd55096c8dfcb8"}, - {file = "ujson-5.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:e82d4bb2138ab05e18f089a83b6564fee28048771eb63cdecf4b9b549de8a2cc"}, - {file = "ujson-5.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dfef2814c6b3291c3c5f10065f745a1307d86019dbd7ea50e83504950136ed5b"}, - {file = "ujson-5.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4734ee0745d5928d0ba3a213647f1c4a74a2a28edc6d27b2d6d5bd9fa4319e27"}, - {file = "ujson-5.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d47ebb01bd865fdea43da56254a3930a413f0c5590372a1241514abae8aa7c76"}, - {file = "ujson-5.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dee5e97c2496874acbf1d3e37b521dd1f307349ed955e62d1d2f05382bc36dd5"}, - {file = "ujson-5.10.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7490655a2272a2d0b072ef16b0b58ee462f4973a8f6bbe64917ce5e0a256f9c0"}, - {file = "ujson-5.10.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ba17799fcddaddf5c1f75a4ba3fd6441f6a4f1e9173f8a786b42450851bd74f1"}, - {file = "ujson-5.10.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2aff2985cef314f21d0fecc56027505804bc78802c0121343874741650a4d3d1"}, - {file = "ujson-5.10.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ad88ac75c432674d05b61184178635d44901eb749786c8eb08c102330e6e8996"}, - {file = "ujson-5.10.0-cp39-cp39-win32.whl", hash = "sha256:2544912a71da4ff8c4f7ab5606f947d7299971bdd25a45e008e467ca638d13c9"}, - {file = "ujson-5.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:3ff201d62b1b177a46f113bb43ad300b424b7847f9c5d38b1b4ad8f75d4a282a"}, - {file = "ujson-5.10.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5b6fee72fa77dc172a28f21693f64d93166534c263adb3f96c413ccc85ef6e64"}, - {file = "ujson-5.10.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:61d0af13a9af01d9f26d2331ce49bb5ac1fb9c814964018ac8df605b5422dcb3"}, - {file = "ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecb24f0bdd899d368b715c9e6664166cf694d1e57be73f17759573a6986dd95a"}, - {file = "ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fbd8fd427f57a03cff3ad6574b5e299131585d9727c8c366da4624a9069ed746"}, - {file = "ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:beeaf1c48e32f07d8820c705ff8e645f8afa690cca1544adba4ebfa067efdc88"}, - {file = 
"ujson-5.10.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:baed37ea46d756aca2955e99525cc02d9181de67f25515c468856c38d52b5f3b"}, - {file = "ujson-5.10.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7663960f08cd5a2bb152f5ee3992e1af7690a64c0e26d31ba7b3ff5b2ee66337"}, - {file = "ujson-5.10.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:d8640fb4072d36b08e95a3a380ba65779d356b2fee8696afeb7794cf0902d0a1"}, - {file = "ujson-5.10.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78778a3aa7aafb11e7ddca4e29f46bc5139131037ad628cc10936764282d6753"}, - {file = "ujson-5.10.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0111b27f2d5c820e7f2dbad7d48e3338c824e7ac4d2a12da3dc6061cc39c8e6"}, - {file = "ujson-5.10.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:c66962ca7565605b355a9ed478292da628b8f18c0f2793021ca4425abf8b01e5"}, - {file = "ujson-5.10.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ba43cc34cce49cf2d4bc76401a754a81202d8aa926d0e2b79f0ee258cb15d3a4"}, - {file = "ujson-5.10.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ac56eb983edce27e7f51d05bc8dd820586c6e6be1c5216a6809b0c668bb312b8"}, - {file = "ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44bd4b23a0e723bf8b10628288c2c7c335161d6840013d4d5de20e48551773b"}, - {file = "ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c10f4654e5326ec14a46bcdeb2b685d4ada6911050aa8baaf3501e57024b804"}, - {file = "ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0de4971a89a762398006e844ae394bd46991f7c385d7a6a3b93ba229e6dac17e"}, - {file = "ujson-5.10.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e1402f0564a97d2a52310ae10a64d25bcef94f8dd643fcf5d310219d915484f7"}, - {file = "ujson-5.10.0.tar.gz", hash = "sha256:b3cd8f3c5d8c7738257f1018880444f7b7d9b66232c64649f562d7ba86ad4bc1"}, + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] [[package]] name = "urllib3" -version = "2.2.1" +version = "2.2.2" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, - {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, + {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, + {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, ] [package.extras] @@ -3379,13 +2374,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "uvicorn" -version = "0.29.0" +version = "0.30.6" description = "The lightning-fast ASGI server." 
optional = false python-versions = ">=3.8" files = [ - {file = "uvicorn-0.29.0-py3-none-any.whl", hash = "sha256:2c2aac7ff4f4365c206fd773a39bf4ebd1047c238f8b8268ad996829323473de"}, - {file = "uvicorn-0.29.0.tar.gz", hash = "sha256:6a69214c0b6a087462412670b3ef21224fa48cae0e452b5883e8e8bdfdd11dd0"}, + {file = "uvicorn-0.30.6-py3-none-any.whl", hash = "sha256:65fd46fe3fda5bdc1b03b94eb634923ff18cd35b2f084813ea79d1f103f711b5"}, + {file = "uvicorn-0.30.6.tar.gz", hash = "sha256:4b15decdda1e72be08209e860a1e10e92439ad5b97cf44cc945fcbee66fc5788"}, ] [package.dependencies] @@ -3405,42 +2400,42 @@ standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", [[package]] name = "uvloop" -version = "0.19.0" +version = "0.20.0" description = "Fast implementation of asyncio event loop on top of libuv" optional = false python-versions = ">=3.8.0" files = [ - {file = "uvloop-0.19.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:de4313d7f575474c8f5a12e163f6d89c0a878bc49219641d49e6f1444369a90e"}, - {file = "uvloop-0.19.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5588bd21cf1fcf06bded085f37e43ce0e00424197e7c10e77afd4bbefffef428"}, - {file = "uvloop-0.19.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b1fd71c3843327f3bbc3237bedcdb6504fd50368ab3e04d0410e52ec293f5b8"}, - {file = "uvloop-0.19.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a05128d315e2912791de6088c34136bfcdd0c7cbc1cf85fd6fd1bb321b7c849"}, - {file = "uvloop-0.19.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:cd81bdc2b8219cb4b2556eea39d2e36bfa375a2dd021404f90a62e44efaaf957"}, - {file = "uvloop-0.19.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5f17766fb6da94135526273080f3455a112f82570b2ee5daa64d682387fe0dcd"}, - {file = "uvloop-0.19.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4ce6b0af8f2729a02a5d1575feacb2a94fc7b2e983868b009d51c9a9d2149bef"}, - {file = "uvloop-0.19.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:31e672bb38b45abc4f26e273be83b72a0d28d074d5b370fc4dcf4c4eb15417d2"}, - {file = "uvloop-0.19.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:570fc0ed613883d8d30ee40397b79207eedd2624891692471808a95069a007c1"}, - {file = "uvloop-0.19.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5138821e40b0c3e6c9478643b4660bd44372ae1e16a322b8fc07478f92684e24"}, - {file = "uvloop-0.19.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:91ab01c6cd00e39cde50173ba4ec68a1e578fee9279ba64f5221810a9e786533"}, - {file = "uvloop-0.19.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:47bf3e9312f63684efe283f7342afb414eea4d3011542155c7e625cd799c3b12"}, - {file = "uvloop-0.19.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:da8435a3bd498419ee8c13c34b89b5005130a476bda1d6ca8cfdde3de35cd650"}, - {file = "uvloop-0.19.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:02506dc23a5d90e04d4f65c7791e65cf44bd91b37f24cfc3ef6cf2aff05dc7ec"}, - {file = "uvloop-0.19.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2693049be9d36fef81741fddb3f441673ba12a34a704e7b4361efb75cf30befc"}, - {file = "uvloop-0.19.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7010271303961c6f0fe37731004335401eb9075a12680738731e9c92ddd96ad6"}, - {file = "uvloop-0.19.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5daa304d2161d2918fa9a17d5635099a2f78ae5b5960e742b2fcfbb7aefaa593"}, - {file = 
"uvloop-0.19.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:7207272c9520203fea9b93843bb775d03e1cf88a80a936ce760f60bb5add92f3"}, - {file = "uvloop-0.19.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:78ab247f0b5671cc887c31d33f9b3abfb88d2614b84e4303f1a63b46c046c8bd"}, - {file = "uvloop-0.19.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:472d61143059c84947aa8bb74eabbace30d577a03a1805b77933d6bd13ddebbd"}, - {file = "uvloop-0.19.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45bf4c24c19fb8a50902ae37c5de50da81de4922af65baf760f7c0c42e1088be"}, - {file = "uvloop-0.19.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:271718e26b3e17906b28b67314c45d19106112067205119dddbd834c2b7ce797"}, - {file = "uvloop-0.19.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:34175c9fd2a4bc3adc1380e1261f60306344e3407c20a4d684fd5f3be010fa3d"}, - {file = "uvloop-0.19.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e27f100e1ff17f6feeb1f33968bc185bf8ce41ca557deee9d9bbbffeb72030b7"}, - {file = "uvloop-0.19.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:13dfdf492af0aa0a0edf66807d2b465607d11c4fa48f4a1fd41cbea5b18e8e8b"}, - {file = "uvloop-0.19.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6e3d4e85ac060e2342ff85e90d0c04157acb210b9ce508e784a944f852a40e67"}, - {file = "uvloop-0.19.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ca4956c9ab567d87d59d49fa3704cf29e37109ad348f2d5223c9bf761a332e7"}, - {file = "uvloop-0.19.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f467a5fd23b4fc43ed86342641f3936a68ded707f4627622fa3f82a120e18256"}, - {file = "uvloop-0.19.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:492e2c32c2af3f971473bc22f086513cedfc66a130756145a931a90c3958cb17"}, - {file = "uvloop-0.19.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2df95fca285a9f5bfe730e51945ffe2fa71ccbfdde3b0da5772b4ee4f2e770d5"}, - {file = "uvloop-0.19.0.tar.gz", hash = "sha256:0246f4fd1bf2bf702e06b0d45ee91677ee5c31242f39aab4ea6fe0c51aedd0fd"}, + {file = "uvloop-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9ebafa0b96c62881d5cafa02d9da2e44c23f9f0cd829f3a32a6aff771449c996"}, + {file = "uvloop-0.20.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:35968fc697b0527a06e134999eef859b4034b37aebca537daeb598b9d45a137b"}, + {file = "uvloop-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b16696f10e59d7580979b420eedf6650010a4a9c3bd8113f24a103dfdb770b10"}, + {file = "uvloop-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b04d96188d365151d1af41fa2d23257b674e7ead68cfd61c725a422764062ae"}, + {file = "uvloop-0.20.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:94707205efbe809dfa3a0d09c08bef1352f5d3d6612a506f10a319933757c006"}, + {file = "uvloop-0.20.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:89e8d33bb88d7263f74dc57d69f0063e06b5a5ce50bb9a6b32f5fcbe655f9e73"}, + {file = "uvloop-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e50289c101495e0d1bb0bfcb4a60adde56e32f4449a67216a1ab2750aa84f037"}, + {file = "uvloop-0.20.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e237f9c1e8a00e7d9ddaa288e535dc337a39bcbf679f290aee9d26df9e72bce9"}, + {file = "uvloop-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:746242cd703dc2b37f9d8b9f173749c15e9a918ddb021575a0205ec29a38d31e"}, + {file = 
"uvloop-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82edbfd3df39fb3d108fc079ebc461330f7c2e33dbd002d146bf7c445ba6e756"}, + {file = "uvloop-0.20.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:80dc1b139516be2077b3e57ce1cb65bfed09149e1d175e0478e7a987863b68f0"}, + {file = "uvloop-0.20.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4f44af67bf39af25db4c1ac27e82e9665717f9c26af2369c404be865c8818dcf"}, + {file = "uvloop-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4b75f2950ddb6feed85336412b9a0c310a2edbcf4cf931aa5cfe29034829676d"}, + {file = "uvloop-0.20.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:77fbc69c287596880ecec2d4c7a62346bef08b6209749bf6ce8c22bbaca0239e"}, + {file = "uvloop-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6462c95f48e2d8d4c993a2950cd3d31ab061864d1c226bbf0ee2f1a8f36674b9"}, + {file = "uvloop-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:649c33034979273fa71aa25d0fe120ad1777c551d8c4cd2c0c9851d88fcb13ab"}, + {file = "uvloop-0.20.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3a609780e942d43a275a617c0839d85f95c334bad29c4c0918252085113285b5"}, + {file = "uvloop-0.20.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aea15c78e0d9ad6555ed201344ae36db5c63d428818b4b2a42842b3870127c00"}, + {file = "uvloop-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f0e94b221295b5e69de57a1bd4aeb0b3a29f61be6e1b478bb8a69a73377db7ba"}, + {file = "uvloop-0.20.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fee6044b64c965c425b65a4e17719953b96e065c5b7e09b599ff332bb2744bdf"}, + {file = "uvloop-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:265a99a2ff41a0fd56c19c3838b29bf54d1d177964c300dad388b27e84fd7847"}, + {file = "uvloop-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b10c2956efcecb981bf9cfb8184d27d5d64b9033f917115a960b83f11bfa0d6b"}, + {file = "uvloop-0.20.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e7d61fe8e8d9335fac1bf8d5d82820b4808dd7a43020c149b63a1ada953d48a6"}, + {file = "uvloop-0.20.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2beee18efd33fa6fdb0976e18475a4042cd31c7433c866e8a09ab604c7c22ff2"}, + {file = "uvloop-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d8c36fdf3e02cec92aed2d44f63565ad1522a499c654f07935c8f9d04db69e95"}, + {file = "uvloop-0.20.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a0fac7be202596c7126146660725157d4813aa29a4cc990fe51346f75ff8fde7"}, + {file = "uvloop-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d0fba61846f294bce41eb44d60d58136090ea2b5b99efd21cbdf4e21927c56a"}, + {file = "uvloop-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95720bae002ac357202e0d866128eb1ac82545bcf0b549b9abe91b5178d9b541"}, + {file = "uvloop-0.20.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:36c530d8fa03bfa7085af54a48f2ca16ab74df3ec7108a46ba82fd8b411a2315"}, + {file = "uvloop-0.20.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e97152983442b499d7a71e44f29baa75b3b02e65d9c44ba53b10338e98dedb66"}, + {file = "uvloop-0.20.0.tar.gz", hash = "sha256:4603ca714a754fc8d9b197e325db25b2ea045385e8a3ad05d3463de725fdf469"}, ] [package.extras] @@ -3449,40 +2444,46 @@ test = ["Cython (>=0.29.36,<0.30.0)", "aiohttp (==3.9.0b0)", "aiohttp (>=3.8.1)" [[package]] name = "watchdog" -version = "4.0.0" +version = "4.0.2" description = "Filesystem 
events monitoring" optional = false python-versions = ">=3.8" files = [ - {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:39cb34b1f1afbf23e9562501673e7146777efe95da24fab5707b88f7fb11649b"}, - {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c522392acc5e962bcac3b22b9592493ffd06d1fc5d755954e6be9f4990de932b"}, - {file = "watchdog-4.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6c47bdd680009b11c9ac382163e05ca43baf4127954c5f6d0250e7d772d2b80c"}, - {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8350d4055505412a426b6ad8c521bc7d367d1637a762c70fdd93a3a0d595990b"}, - {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c17d98799f32e3f55f181f19dd2021d762eb38fdd381b4a748b9f5a36738e935"}, - {file = "watchdog-4.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4986db5e8880b0e6b7cd52ba36255d4793bf5cdc95bd6264806c233173b1ec0b"}, - {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:11e12fafb13372e18ca1bbf12d50f593e7280646687463dd47730fd4f4d5d257"}, - {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5369136a6474678e02426bd984466343924d1df8e2fd94a9b443cb7e3aa20d19"}, - {file = "watchdog-4.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76ad8484379695f3fe46228962017a7e1337e9acadafed67eb20aabb175df98b"}, - {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:45cc09cc4c3b43fb10b59ef4d07318d9a3ecdbff03abd2e36e77b6dd9f9a5c85"}, - {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eed82cdf79cd7f0232e2fdc1ad05b06a5e102a43e331f7d041e5f0e0a34a51c4"}, - {file = "watchdog-4.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba30a896166f0fee83183cec913298151b73164160d965af2e93a20bbd2ab605"}, - {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d18d7f18a47de6863cd480734613502904611730f8def45fc52a5d97503e5101"}, - {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2895bf0518361a9728773083908801a376743bcc37dfa252b801af8fd281b1ca"}, - {file = "watchdog-4.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87e9df830022488e235dd601478c15ad73a0389628588ba0b028cb74eb72fed8"}, - {file = "watchdog-4.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6e949a8a94186bced05b6508faa61b7adacc911115664ccb1923b9ad1f1ccf7b"}, - {file = "watchdog-4.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6a4db54edea37d1058b08947c789a2354ee02972ed5d1e0dca9b0b820f4c7f92"}, - {file = "watchdog-4.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d31481ccf4694a8416b681544c23bd271f5a123162ab603c7d7d2dd7dd901a07"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8fec441f5adcf81dd240a5fe78e3d83767999771630b5ddfc5867827a34fa3d3"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:6a9c71a0b02985b4b0b6d14b875a6c86ddea2fdbebd0c9a720a806a8bbffc69f"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:557ba04c816d23ce98a06e70af6abaa0485f6d94994ec78a42b05d1c03dcbd50"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:d0f9bd1fd919134d459d8abf954f63886745f4660ef66480b9d753a7c9d40927"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f9b2fdca47dc855516b2d66eef3c39f2672cbf7e7a42e7e67ad2cbfcd6ba107d"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_s390x.whl", hash = 
"sha256:73c7a935e62033bd5e8f0da33a4dcb763da2361921a69a5a95aaf6c93aa03a87"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:6a80d5cae8c265842c7419c560b9961561556c4361b297b4c431903f8c33b269"}, - {file = "watchdog-4.0.0-py3-none-win32.whl", hash = "sha256:8f9a542c979df62098ae9c58b19e03ad3df1c9d8c6895d96c0d51da17b243b1c"}, - {file = "watchdog-4.0.0-py3-none-win_amd64.whl", hash = "sha256:f970663fa4f7e80401a7b0cbeec00fa801bf0287d93d48368fc3e6fa32716245"}, - {file = "watchdog-4.0.0-py3-none-win_ia64.whl", hash = "sha256:9a03e16e55465177d416699331b0f3564138f1807ecc5f2de9d55d8f188d08c7"}, - {file = "watchdog-4.0.0.tar.gz", hash = "sha256:e3e7065cbdabe6183ab82199d7a4f6b3ba0a438c5a512a68559846ccb76a78ec"}, + {file = "watchdog-4.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ede7f010f2239b97cc79e6cb3c249e72962404ae3865860855d5cbe708b0fd22"}, + {file = "watchdog-4.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a2cffa171445b0efa0726c561eca9a27d00a1f2b83846dbd5a4f639c4f8ca8e1"}, + {file = "watchdog-4.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c50f148b31b03fbadd6d0b5980e38b558046b127dc483e5e4505fcef250f9503"}, + {file = "watchdog-4.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7c7d4bf585ad501c5f6c980e7be9c4f15604c7cc150e942d82083b31a7548930"}, + {file = "watchdog-4.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:914285126ad0b6eb2258bbbcb7b288d9dfd655ae88fa28945be05a7b475a800b"}, + {file = "watchdog-4.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:984306dc4720da5498b16fc037b36ac443816125a3705dfde4fd90652d8028ef"}, + {file = "watchdog-4.0.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1cdcfd8142f604630deef34722d695fb455d04ab7cfe9963055df1fc69e6727a"}, + {file = "watchdog-4.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d7ab624ff2f663f98cd03c8b7eedc09375a911794dfea6bf2a359fcc266bff29"}, + {file = "watchdog-4.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:132937547a716027bd5714383dfc40dc66c26769f1ce8a72a859d6a48f371f3a"}, + {file = "watchdog-4.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:cd67c7df93eb58f360c43802acc945fa8da70c675b6fa37a241e17ca698ca49b"}, + {file = "watchdog-4.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:bcfd02377be80ef3b6bc4ce481ef3959640458d6feaae0bd43dd90a43da90a7d"}, + {file = "watchdog-4.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:980b71510f59c884d684b3663d46e7a14b457c9611c481e5cef08f4dd022eed7"}, + {file = "watchdog-4.0.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:aa160781cafff2719b663c8a506156e9289d111d80f3387cf3af49cedee1f040"}, + {file = "watchdog-4.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f6ee8dedd255087bc7fe82adf046f0b75479b989185fb0bdf9a98b612170eac7"}, + {file = "watchdog-4.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0b4359067d30d5b864e09c8597b112fe0a0a59321a0f331498b013fb097406b4"}, + {file = "watchdog-4.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:770eef5372f146997638d737c9a3c597a3b41037cfbc5c41538fc27c09c3a3f9"}, + {file = "watchdog-4.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eeea812f38536a0aa859972d50c76e37f4456474b02bd93674d1947cf1e39578"}, + {file = "watchdog-4.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b2c45f6e1e57ebb4687690c05bc3a2c1fb6ab260550c4290b8abb1335e0fd08b"}, + {file = "watchdog-4.0.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:10b6683df70d340ac3279eff0b2766813f00f35a1d37515d2c99959ada8f05fa"}, + {file = 
"watchdog-4.0.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f7c739888c20f99824f7aa9d31ac8a97353e22d0c0e54703a547a218f6637eb3"}, + {file = "watchdog-4.0.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c100d09ac72a8a08ddbf0629ddfa0b8ee41740f9051429baa8e31bb903ad7508"}, + {file = "watchdog-4.0.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:f5315a8c8dd6dd9425b974515081fc0aadca1d1d61e078d2246509fd756141ee"}, + {file = "watchdog-4.0.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:2d468028a77b42cc685ed694a7a550a8d1771bb05193ba7b24006b8241a571a1"}, + {file = "watchdog-4.0.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f15edcae3830ff20e55d1f4e743e92970c847bcddc8b7509bcd172aa04de506e"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_aarch64.whl", hash = "sha256:936acba76d636f70db8f3c66e76aa6cb5136a936fc2a5088b9ce1c7a3508fc83"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_armv7l.whl", hash = "sha256:e252f8ca942a870f38cf785aef420285431311652d871409a64e2a0a52a2174c"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_i686.whl", hash = "sha256:0e83619a2d5d436a7e58a1aea957a3c1ccbf9782c43c0b4fed80580e5e4acd1a"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_ppc64.whl", hash = "sha256:88456d65f207b39f1981bf772e473799fcdc10801062c36fd5ad9f9d1d463a73"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:32be97f3b75693a93c683787a87a0dc8db98bb84701539954eef991fb35f5fbc"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_s390x.whl", hash = "sha256:c82253cfc9be68e3e49282831afad2c1f6593af80c0daf1287f6a92657986757"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_x86_64.whl", hash = "sha256:c0b14488bd336c5b1845cee83d3e631a1f8b4e9c5091ec539406e4a324f882d8"}, + {file = "watchdog-4.0.2-py3-none-win32.whl", hash = "sha256:0d8a7e523ef03757a5aa29f591437d64d0d894635f8a50f370fe37f913ce4e19"}, + {file = "watchdog-4.0.2-py3-none-win_amd64.whl", hash = "sha256:c344453ef3bf875a535b0488e3ad28e341adbd5a9ffb0f7d62cefacc8824ef2b"}, + {file = "watchdog-4.0.2-py3-none-win_ia64.whl", hash = "sha256:baececaa8edff42cd16558a639a9b0ddf425f93d892e8392a56bf904f5eff22c"}, + {file = "watchdog-4.0.2.tar.gz", hash = "sha256:b4dfbb6c49221be4535623ea4474a4d6ee0a9cef4a80b20c28db4d858b64e270"}, ] [package.extras] @@ -3490,86 +2491,98 @@ watchmedo = ["PyYAML (>=3.10)"] [[package]] name = "watchfiles" -version = "0.21.0" +version = "0.23.0" description = "Simple, modern and high performance file watching and code reload in python." 
optional = false python-versions = ">=3.8" files = [ - {file = "watchfiles-0.21.0-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:27b4035013f1ea49c6c0b42d983133b136637a527e48c132d368eb19bf1ac6aa"}, - {file = "watchfiles-0.21.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c81818595eff6e92535ff32825f31c116f867f64ff8cdf6562cd1d6b2e1e8f3e"}, - {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6c107ea3cf2bd07199d66f156e3ea756d1b84dfd43b542b2d870b77868c98c03"}, - {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d9ac347653ebd95839a7c607608703b20bc07e577e870d824fa4801bc1cb124"}, - {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5eb86c6acb498208e7663ca22dbe68ca2cf42ab5bf1c776670a50919a56e64ab"}, - {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f564bf68404144ea6b87a78a3f910cc8de216c6b12a4cf0b27718bf4ec38d303"}, - {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d0f32ebfaa9c6011f8454994f86108c2eb9c79b8b7de00b36d558cadcedaa3d"}, - {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6d45d9b699ecbac6c7bd8e0a2609767491540403610962968d258fd6405c17c"}, - {file = "watchfiles-0.21.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:aff06b2cac3ef4616e26ba17a9c250c1fe9dd8a5d907d0193f84c499b1b6e6a9"}, - {file = "watchfiles-0.21.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d9792dff410f266051025ecfaa927078b94cc7478954b06796a9756ccc7e14a9"}, - {file = "watchfiles-0.21.0-cp310-none-win32.whl", hash = "sha256:214cee7f9e09150d4fb42e24919a1e74d8c9b8a9306ed1474ecaddcd5479c293"}, - {file = "watchfiles-0.21.0-cp310-none-win_amd64.whl", hash = "sha256:1ad7247d79f9f55bb25ab1778fd47f32d70cf36053941f07de0b7c4e96b5d235"}, - {file = "watchfiles-0.21.0-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:668c265d90de8ae914f860d3eeb164534ba2e836811f91fecc7050416ee70aa7"}, - {file = "watchfiles-0.21.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a23092a992e61c3a6a70f350a56db7197242f3490da9c87b500f389b2d01eef"}, - {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e7941bbcfdded9c26b0bf720cb7e6fd803d95a55d2c14b4bd1f6a2772230c586"}, - {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11cd0c3100e2233e9c53106265da31d574355c288e15259c0d40a4405cbae317"}, - {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d78f30cbe8b2ce770160d3c08cff01b2ae9306fe66ce899b73f0409dc1846c1b"}, - {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6674b00b9756b0af620aa2a3346b01f8e2a3dc729d25617e1b89cf6af4a54eb1"}, - {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd7ac678b92b29ba630d8c842d8ad6c555abda1b9ef044d6cc092dacbfc9719d"}, - {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c873345680c1b87f1e09e0eaf8cf6c891b9851d8b4d3645e7efe2ec20a20cc7"}, - {file = "watchfiles-0.21.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:49f56e6ecc2503e7dbe233fa328b2be1a7797d31548e7a193237dcdf1ad0eee0"}, - {file = "watchfiles-0.21.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:02d91cbac553a3ad141db016e3350b03184deaafeba09b9d6439826ee594b365"}, - {file = "watchfiles-0.21.0-cp311-none-win32.whl", hash = "sha256:ebe684d7d26239e23d102a2bad2a358dedf18e462e8808778703427d1f584400"}, - {file = "watchfiles-0.21.0-cp311-none-win_amd64.whl", hash = "sha256:4566006aa44cb0d21b8ab53baf4b9c667a0ed23efe4aaad8c227bfba0bf15cbe"}, - {file = "watchfiles-0.21.0-cp311-none-win_arm64.whl", hash = "sha256:c550a56bf209a3d987d5a975cdf2063b3389a5d16caf29db4bdddeae49f22078"}, - {file = "watchfiles-0.21.0-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:51ddac60b96a42c15d24fbdc7a4bfcd02b5a29c047b7f8bf63d3f6f5a860949a"}, - {file = "watchfiles-0.21.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:511f0b034120cd1989932bf1e9081aa9fb00f1f949fbd2d9cab6264916ae89b1"}, - {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:cfb92d49dbb95ec7a07511bc9efb0faff8fe24ef3805662b8d6808ba8409a71a"}, - {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f92944efc564867bbf841c823c8b71bb0be75e06b8ce45c084b46411475a915"}, - {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:642d66b75eda909fd1112d35c53816d59789a4b38c141a96d62f50a3ef9b3360"}, - {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d23bcd6c8eaa6324fe109d8cac01b41fe9a54b8c498af9ce464c1aeeb99903d6"}, - {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18d5b4da8cf3e41895b34e8c37d13c9ed294954907929aacd95153508d5d89d7"}, - {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b8d1eae0f65441963d805f766c7e9cd092f91e0c600c820c764a4ff71a0764c"}, - {file = "watchfiles-0.21.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1fd9a5205139f3c6bb60d11f6072e0552f0a20b712c85f43d42342d162be1235"}, - {file = "watchfiles-0.21.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a1e3014a625bcf107fbf38eece0e47fa0190e52e45dc6eee5a8265ddc6dc5ea7"}, - {file = "watchfiles-0.21.0-cp312-none-win32.whl", hash = "sha256:9d09869f2c5a6f2d9df50ce3064b3391d3ecb6dced708ad64467b9e4f2c9bef3"}, - {file = "watchfiles-0.21.0-cp312-none-win_amd64.whl", hash = "sha256:18722b50783b5e30a18a8a5db3006bab146d2b705c92eb9a94f78c72beb94094"}, - {file = "watchfiles-0.21.0-cp312-none-win_arm64.whl", hash = "sha256:a3b9bec9579a15fb3ca2d9878deae789df72f2b0fdaf90ad49ee389cad5edab6"}, - {file = "watchfiles-0.21.0-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:4ea10a29aa5de67de02256a28d1bf53d21322295cb00bd2d57fcd19b850ebd99"}, - {file = "watchfiles-0.21.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:40bca549fdc929b470dd1dbfcb47b3295cb46a6d2c90e50588b0a1b3bd98f429"}, - {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9b37a7ba223b2f26122c148bb8d09a9ff312afca998c48c725ff5a0a632145f7"}, - {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec8c8900dc5c83650a63dd48c4d1d245343f904c4b64b48798c67a3767d7e165"}, - {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8ad3fe0a3567c2f0f629d800409cd528cb6251da12e81a1f765e5c5345fd0137"}, - {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d353c4cfda586db2a176ce42c88f2fc31ec25e50212650c89fdd0f560ee507b"}, - {file = 
"watchfiles-0.21.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:83a696da8922314ff2aec02987eefb03784f473281d740bf9170181829133765"}, - {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a03651352fc20975ee2a707cd2d74a386cd303cc688f407296064ad1e6d1562"}, - {file = "watchfiles-0.21.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3ad692bc7792be8c32918c699638b660c0de078a6cbe464c46e1340dadb94c19"}, - {file = "watchfiles-0.21.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06247538e8253975bdb328e7683f8515ff5ff041f43be6c40bff62d989b7d0b0"}, - {file = "watchfiles-0.21.0-cp38-none-win32.whl", hash = "sha256:9a0aa47f94ea9a0b39dd30850b0adf2e1cd32a8b4f9c7aa443d852aacf9ca214"}, - {file = "watchfiles-0.21.0-cp38-none-win_amd64.whl", hash = "sha256:8d5f400326840934e3507701f9f7269247f7c026d1b6cfd49477d2be0933cfca"}, - {file = "watchfiles-0.21.0-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:7f762a1a85a12cc3484f77eee7be87b10f8c50b0b787bb02f4e357403cad0c0e"}, - {file = "watchfiles-0.21.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6e9be3ef84e2bb9710f3f777accce25556f4a71e15d2b73223788d528fcc2052"}, - {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4c48a10d17571d1275701e14a601e36959ffada3add8cdbc9e5061a6e3579a5d"}, - {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c889025f59884423428c261f212e04d438de865beda0b1e1babab85ef4c0f01"}, - {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:66fac0c238ab9a2e72d026b5fb91cb902c146202bbd29a9a1a44e8db7b710b6f"}, - {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b4a21f71885aa2744719459951819e7bf5a906a6448a6b2bbce8e9cc9f2c8128"}, - {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c9198c989f47898b2c22201756f73249de3748e0fc9de44adaf54a8b259cc0c"}, - {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8f57c4461cd24fda22493109c45b3980863c58a25b8bec885ca8bea6b8d4b28"}, - {file = "watchfiles-0.21.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:853853cbf7bf9408b404754b92512ebe3e3a83587503d766d23e6bf83d092ee6"}, - {file = "watchfiles-0.21.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d5b1dc0e708fad9f92c296ab2f948af403bf201db8fb2eb4c8179db143732e49"}, - {file = "watchfiles-0.21.0-cp39-none-win32.whl", hash = "sha256:59137c0c6826bd56c710d1d2bda81553b5e6b7c84d5a676747d80caf0409ad94"}, - {file = "watchfiles-0.21.0-cp39-none-win_amd64.whl", hash = "sha256:6cb8fdc044909e2078c248986f2fc76f911f72b51ea4a4fbbf472e01d14faa58"}, - {file = "watchfiles-0.21.0-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ab03a90b305d2588e8352168e8c5a1520b721d2d367f31e9332c4235b30b8994"}, - {file = "watchfiles-0.21.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:927c589500f9f41e370b0125c12ac9e7d3a2fd166b89e9ee2828b3dda20bfe6f"}, - {file = "watchfiles-0.21.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bd467213195e76f838caf2c28cd65e58302d0254e636e7c0fca81efa4a2e62c"}, - {file = "watchfiles-0.21.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02b73130687bc3f6bb79d8a170959042eb56eb3a42df3671c79b428cd73f17cc"}, - {file = "watchfiles-0.21.0-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = 
"sha256:08dca260e85ffae975448e344834d765983237ad6dc308231aa16e7933db763e"}, - {file = "watchfiles-0.21.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:3ccceb50c611c433145502735e0370877cced72a6c70fd2410238bcbc7fe51d8"}, - {file = "watchfiles-0.21.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57d430f5fb63fea141ab71ca9c064e80de3a20b427ca2febcbfcef70ff0ce895"}, - {file = "watchfiles-0.21.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dd5fad9b9c0dd89904bbdea978ce89a2b692a7ee8a0ce19b940e538c88a809c"}, - {file = "watchfiles-0.21.0-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:be6dd5d52b73018b21adc1c5d28ac0c68184a64769052dfeb0c5d9998e7f56a2"}, - {file = "watchfiles-0.21.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b3cab0e06143768499384a8a5efb9c4dc53e19382952859e4802f294214f36ec"}, - {file = "watchfiles-0.21.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c6ed10c2497e5fedadf61e465b3ca12a19f96004c15dcffe4bd442ebadc2d85"}, - {file = "watchfiles-0.21.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43babacef21c519bc6631c5fce2a61eccdfc011b4bcb9047255e9620732c8097"}, - {file = "watchfiles-0.21.0.tar.gz", hash = "sha256:c76c635fabf542bb78524905718c39f736a98e5ab25b23ec6d4abede1a85a6a3"}, + {file = "watchfiles-0.23.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:bee8ce357a05c20db04f46c22be2d1a2c6a8ed365b325d08af94358e0688eeb4"}, + {file = "watchfiles-0.23.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4ccd3011cc7ee2f789af9ebe04745436371d36afe610028921cab9f24bb2987b"}, + {file = "watchfiles-0.23.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb02d41c33be667e6135e6686f1bb76104c88a312a18faa0ef0262b5bf7f1a0f"}, + {file = "watchfiles-0.23.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7cf12ac34c444362f3261fb3ff548f0037ddd4c5bb85f66c4be30d2936beb3c5"}, + {file = "watchfiles-0.23.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0b2c25040a3c0ce0e66c7779cc045fdfbbb8d59e5aabfe033000b42fe44b53e"}, + {file = "watchfiles-0.23.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecf2be4b9eece4f3da8ba5f244b9e51932ebc441c0867bd6af46a3d97eb068d6"}, + {file = "watchfiles-0.23.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40cb8fa00028908211eb9f8d47744dca21a4be6766672e1ff3280bee320436f1"}, + {file = "watchfiles-0.23.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f48c917ffd36ff9a5212614c2d0d585fa8b064ca7e66206fb5c095015bc8207"}, + {file = "watchfiles-0.23.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9d183e3888ada88185ab17064079c0db8c17e32023f5c278d7bf8014713b1b5b"}, + {file = "watchfiles-0.23.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9837edf328b2805346f91209b7e660f65fb0e9ca18b7459d075d58db082bf981"}, + {file = "watchfiles-0.23.0-cp310-none-win32.whl", hash = "sha256:296e0b29ab0276ca59d82d2da22cbbdb39a23eed94cca69aed274595fb3dfe42"}, + {file = "watchfiles-0.23.0-cp310-none-win_amd64.whl", hash = "sha256:4ea756e425ab2dfc8ef2a0cb87af8aa7ef7dfc6fc46c6f89bcf382121d4fff75"}, + {file = "watchfiles-0.23.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:e397b64f7aaf26915bf2ad0f1190f75c855d11eb111cc00f12f97430153c2eab"}, + {file = "watchfiles-0.23.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:b4ac73b02ca1824ec0a7351588241fd3953748d3774694aa7ddb5e8e46aef3e3"}, + {file = "watchfiles-0.23.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:130a896d53b48a1cecccfa903f37a1d87dbb74295305f865a3e816452f6e49e4"}, + {file = "watchfiles-0.23.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c5e7803a65eb2d563c73230e9d693c6539e3c975ccfe62526cadde69f3fda0cf"}, + {file = "watchfiles-0.23.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1aa4cc85202956d1a65c88d18c7b687b8319dbe6b1aec8969784ef7a10e7d1a"}, + {file = "watchfiles-0.23.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87f889f6e58849ddb7c5d2cb19e2e074917ed1c6e3ceca50405775166492cca8"}, + {file = "watchfiles-0.23.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37fd826dac84c6441615aa3f04077adcc5cac7194a021c9f0d69af20fb9fa788"}, + {file = "watchfiles-0.23.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee7db6e36e7a2c15923072e41ea24d9a0cf39658cb0637ecc9307b09d28827e1"}, + {file = "watchfiles-0.23.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2368c5371c17fdcb5a2ea71c5c9d49f9b128821bfee69503cc38eae00feb3220"}, + {file = "watchfiles-0.23.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:857af85d445b9ba9178db95658c219dbd77b71b8264e66836a6eba4fbf49c320"}, + {file = "watchfiles-0.23.0-cp311-none-win32.whl", hash = "sha256:1d636c8aeb28cdd04a4aa89030c4b48f8b2954d8483e5f989774fa441c0ed57b"}, + {file = "watchfiles-0.23.0-cp311-none-win_amd64.whl", hash = "sha256:46f1d8069a95885ca529645cdbb05aea5837d799965676e1b2b1f95a4206313e"}, + {file = "watchfiles-0.23.0-cp311-none-win_arm64.whl", hash = "sha256:e495ed2a7943503766c5d1ff05ae9212dc2ce1c0e30a80d4f0d84889298fa304"}, + {file = "watchfiles-0.23.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1db691bad0243aed27c8354b12d60e8e266b75216ae99d33e927ff5238d270b5"}, + {file = "watchfiles-0.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:62d2b18cb1edaba311fbbfe83fb5e53a858ba37cacb01e69bc20553bb70911b8"}, + {file = "watchfiles-0.23.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e087e8fdf1270d000913c12e6eca44edd02aad3559b3e6b8ef00f0ce76e0636f"}, + {file = "watchfiles-0.23.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dd41d5c72417b87c00b1b635738f3c283e737d75c5fa5c3e1c60cd03eac3af77"}, + {file = "watchfiles-0.23.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e5f3ca0ff47940ce0a389457b35d6df601c317c1e1a9615981c474452f98de1"}, + {file = "watchfiles-0.23.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6991e3a78f642368b8b1b669327eb6751439f9f7eaaa625fae67dd6070ecfa0b"}, + {file = "watchfiles-0.23.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7f7252f52a09f8fa5435dc82b6af79483118ce6bd51eb74e6269f05ee22a7b9f"}, + {file = "watchfiles-0.23.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e01bcb8d767c58865207a6c2f2792ad763a0fe1119fb0a430f444f5b02a5ea0"}, + {file = "watchfiles-0.23.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8e56fbcdd27fce061854ddec99e015dd779cae186eb36b14471fc9ae713b118c"}, + {file = "watchfiles-0.23.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bd3e2d64500a6cad28bcd710ee6269fbeb2e5320525acd0cfab5f269ade68581"}, + {file = "watchfiles-0.23.0-cp312-none-win32.whl", hash = 
"sha256:eb99c954291b2fad0eff98b490aa641e128fbc4a03b11c8a0086de8b7077fb75"}, + {file = "watchfiles-0.23.0-cp312-none-win_amd64.whl", hash = "sha256:dccc858372a56080332ea89b78cfb18efb945da858fabeb67f5a44fa0bcb4ebb"}, + {file = "watchfiles-0.23.0-cp312-none-win_arm64.whl", hash = "sha256:6c21a5467f35c61eafb4e394303720893066897fca937bade5b4f5877d350ff8"}, + {file = "watchfiles-0.23.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ba31c32f6b4dceeb2be04f717811565159617e28d61a60bb616b6442027fd4b9"}, + {file = "watchfiles-0.23.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:85042ab91814fca99cec4678fc063fb46df4cbb57b4835a1cc2cb7a51e10250e"}, + {file = "watchfiles-0.23.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24655e8c1c9c114005c3868a3d432c8aa595a786b8493500071e6a52f3d09217"}, + {file = "watchfiles-0.23.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6b1a950ab299a4a78fd6369a97b8763732bfb154fdb433356ec55a5bce9515c1"}, + {file = "watchfiles-0.23.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8d3c5cd327dd6ce0edfc94374fb5883d254fe78a5e9d9dfc237a1897dc73cd1"}, + {file = "watchfiles-0.23.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ff785af8bacdf0be863ec0c428e3288b817e82f3d0c1d652cd9c6d509020dd0"}, + {file = "watchfiles-0.23.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:02b7ba9d4557149410747353e7325010d48edcfe9d609a85cb450f17fd50dc3d"}, + {file = "watchfiles-0.23.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a1b05c0afb2cd2f48c1ed2ae5487b116e34b93b13074ed3c22ad5c743109f0"}, + {file = "watchfiles-0.23.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:109a61763e7318d9f821b878589e71229f97366fa6a5c7720687d367f3ab9eef"}, + {file = "watchfiles-0.23.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:9f8e6bb5ac007d4a4027b25f09827ed78cbbd5b9700fd6c54429278dacce05d1"}, + {file = "watchfiles-0.23.0-cp313-none-win32.whl", hash = "sha256:f46c6f0aec8d02a52d97a583782d9af38c19a29900747eb048af358a9c1d8e5b"}, + {file = "watchfiles-0.23.0-cp313-none-win_amd64.whl", hash = "sha256:f449afbb971df5c6faeb0a27bca0427d7b600dd8f4a068492faec18023f0dcff"}, + {file = "watchfiles-0.23.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:2dddc2487d33e92f8b6222b5fb74ae2cfde5e8e6c44e0248d24ec23befdc5366"}, + {file = "watchfiles-0.23.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e75695cc952e825fa3e0684a7f4a302f9128721f13eedd8dbd3af2ba450932b8"}, + {file = "watchfiles-0.23.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2537ef60596511df79b91613a5bb499b63f46f01a11a81b0a2b0dedf645d0a9c"}, + {file = "watchfiles-0.23.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:20b423b58f5fdde704a226b598a2d78165fe29eb5621358fe57ea63f16f165c4"}, + {file = "watchfiles-0.23.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b98732ec893975455708d6fc9a6daab527fc8bbe65be354a3861f8c450a632a4"}, + {file = "watchfiles-0.23.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee1f5fcbf5bc33acc0be9dd31130bcba35d6d2302e4eceafafd7d9018c7755ab"}, + {file = "watchfiles-0.23.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8f195338a5a7b50a058522b39517c50238358d9ad8284fd92943643144c0c03"}, + {file = "watchfiles-0.23.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:524fcb8d59b0dbee2c9b32207084b67b2420f6431ed02c18bd191e6c575f5c48"}, + {file = "watchfiles-0.23.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0eff099a4df36afaa0eea7a913aa64dcf2cbd4e7a4f319a73012210af4d23810"}, + {file = "watchfiles-0.23.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a8323daae27ea290ba3350c70c836c0d2b0fb47897fa3b0ca6a5375b952b90d3"}, + {file = "watchfiles-0.23.0-cp38-none-win32.whl", hash = "sha256:aafea64a3ae698695975251f4254df2225e2624185a69534e7fe70581066bc1b"}, + {file = "watchfiles-0.23.0-cp38-none-win_amd64.whl", hash = "sha256:c846884b2e690ba62a51048a097acb6b5cd263d8bd91062cd6137e2880578472"}, + {file = "watchfiles-0.23.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a753993635eccf1ecb185dedcc69d220dab41804272f45e4aef0a67e790c3eb3"}, + {file = "watchfiles-0.23.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6bb91fa4d0b392f0f7e27c40981e46dda9eb0fbc84162c7fb478fe115944f491"}, + {file = "watchfiles-0.23.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1f67312efa3902a8e8496bfa9824d3bec096ff83c4669ea555c6bdd213aa516"}, + {file = "watchfiles-0.23.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7ca6b71dcc50d320c88fb2d88ecd63924934a8abc1673683a242a7ca7d39e781"}, + {file = "watchfiles-0.23.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aec5c29915caf08771d2507da3ac08e8de24a50f746eb1ed295584ba1820330"}, + {file = "watchfiles-0.23.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1733b9bc2c8098c6bdb0ff7a3d7cb211753fecb7bd99bdd6df995621ee1a574b"}, + {file = "watchfiles-0.23.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:02ff5d7bd066c6a7673b17c8879cd8ee903078d184802a7ee851449c43521bdd"}, + {file = "watchfiles-0.23.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18e2de19801b0eaa4c5292a223effb7cfb43904cb742c5317a0ac686ed604765"}, + {file = "watchfiles-0.23.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8ada449e22198c31fb013ae7e9add887e8d2bd2335401abd3cbc55f8c5083647"}, + {file = "watchfiles-0.23.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3af1b05361e1cc497bf1be654a664750ae61f5739e4bb094a2be86ec8c6db9b6"}, + {file = "watchfiles-0.23.0-cp39-none-win32.whl", hash = "sha256:486bda18be5d25ab5d932699ceed918f68eb91f45d018b0343e3502e52866e5e"}, + {file = "watchfiles-0.23.0-cp39-none-win_amd64.whl", hash = "sha256:d2d42254b189a346249424fb9bb39182a19289a2409051ee432fb2926bad966a"}, + {file = "watchfiles-0.23.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6a9265cf87a5b70147bfb2fec14770ed5b11a5bb83353f0eee1c25a81af5abfe"}, + {file = "watchfiles-0.23.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9f02a259fcbbb5fcfe7a0805b1097ead5ba7a043e318eef1db59f93067f0b49b"}, + {file = "watchfiles-0.23.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ebaebb53b34690da0936c256c1cdb0914f24fb0e03da76d185806df9328abed"}, + {file = "watchfiles-0.23.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd257f98cff9c6cb39eee1a83c7c3183970d8a8d23e8cf4f47d9a21329285cee"}, + {file = "watchfiles-0.23.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:aba037c1310dd108411d27b3d5815998ef0e83573e47d4219f45753c710f969f"}, + {file = "watchfiles-0.23.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:a96ac14e184aa86dc43b8a22bb53854760a58b2966c2b41580de938e9bf26ed0"}, + {file = 
"watchfiles-0.23.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11698bb2ea5e991d10f1f4f83a39a02f91e44e4bd05f01b5c1ec04c9342bf63c"}, + {file = "watchfiles-0.23.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efadd40fca3a04063d40c4448c9303ce24dd6151dc162cfae4a2a060232ebdcb"}, + {file = "watchfiles-0.23.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:556347b0abb4224c5ec688fc58214162e92a500323f50182f994f3ad33385dcb"}, + {file = "watchfiles-0.23.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:1cf7f486169986c4b9d34087f08ce56a35126600b6fef3028f19ca16d5889071"}, + {file = "watchfiles-0.23.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f18de0f82c62c4197bea5ecf4389288ac755896aac734bd2cc44004c56e4ac47"}, + {file = "watchfiles-0.23.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:532e1f2c491274d1333a814e4c5c2e8b92345d41b12dc806cf07aaff786beb66"}, + {file = "watchfiles-0.23.0.tar.gz", hash = "sha256:9338ade39ff24f8086bb005d16c29f8e9f19e55b18dcb04dfa26fcbc09da497b"}, ] [package.dependencies] @@ -3593,83 +2606,97 @@ test = ["websockets"] [[package]] name = "websockets" -version = "12.0" +version = "13.0" description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" optional = false python-versions = ">=3.8" files = [ - {file = "websockets-12.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d554236b2a2006e0ce16315c16eaa0d628dab009c33b63ea03f41c6107958374"}, - {file = "websockets-12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2d225bb6886591b1746b17c0573e29804619c8f755b5598d875bb4235ea639be"}, - {file = "websockets-12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eb809e816916a3b210bed3c82fb88eaf16e8afcf9c115ebb2bacede1797d2547"}, - {file = "websockets-12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c588f6abc13f78a67044c6b1273a99e1cf31038ad51815b3b016ce699f0d75c2"}, - {file = "websockets-12.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5aa9348186d79a5f232115ed3fa9020eab66d6c3437d72f9d2c8ac0c6858c558"}, - {file = "websockets-12.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6350b14a40c95ddd53e775dbdbbbc59b124a5c8ecd6fbb09c2e52029f7a9f480"}, - {file = "websockets-12.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:70ec754cc2a769bcd218ed8d7209055667b30860ffecb8633a834dde27d6307c"}, - {file = "websockets-12.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6e96f5ed1b83a8ddb07909b45bd94833b0710f738115751cdaa9da1fb0cb66e8"}, - {file = "websockets-12.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4d87be612cbef86f994178d5186add3d94e9f31cc3cb499a0482b866ec477603"}, - {file = "websockets-12.0-cp310-cp310-win32.whl", hash = "sha256:befe90632d66caaf72e8b2ed4d7f02b348913813c8b0a32fae1cc5fe3730902f"}, - {file = "websockets-12.0-cp310-cp310-win_amd64.whl", hash = "sha256:363f57ca8bc8576195d0540c648aa58ac18cf85b76ad5202b9f976918f4219cf"}, - {file = "websockets-12.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5d873c7de42dea355d73f170be0f23788cf3fa9f7bed718fd2830eefedce01b4"}, - {file = "websockets-12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3f61726cae9f65b872502ff3c1496abc93ffbe31b278455c418492016e2afc8f"}, - {file = "websockets-12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:ed2fcf7a07334c77fc8a230755c2209223a7cc44fc27597729b8ef5425aa61a3"}, - {file = "websockets-12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e332c210b14b57904869ca9f9bf4ca32f5427a03eeb625da9b616c85a3a506c"}, - {file = "websockets-12.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5693ef74233122f8ebab026817b1b37fe25c411ecfca084b29bc7d6efc548f45"}, - {file = "websockets-12.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e9e7db18b4539a29cc5ad8c8b252738a30e2b13f033c2d6e9d0549b45841c04"}, - {file = "websockets-12.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6e2df67b8014767d0f785baa98393725739287684b9f8d8a1001eb2839031447"}, - {file = "websockets-12.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bea88d71630c5900690fcb03161ab18f8f244805c59e2e0dc4ffadae0a7ee0ca"}, - {file = "websockets-12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dff6cdf35e31d1315790149fee351f9e52978130cef6c87c4b6c9b3baf78bc53"}, - {file = "websockets-12.0-cp311-cp311-win32.whl", hash = "sha256:3e3aa8c468af01d70332a382350ee95f6986db479ce7af14d5e81ec52aa2b402"}, - {file = "websockets-12.0-cp311-cp311-win_amd64.whl", hash = "sha256:25eb766c8ad27da0f79420b2af4b85d29914ba0edf69f547cc4f06ca6f1d403b"}, - {file = "websockets-12.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0e6e2711d5a8e6e482cacb927a49a3d432345dfe7dea8ace7b5790df5932e4df"}, - {file = "websockets-12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:dbcf72a37f0b3316e993e13ecf32f10c0e1259c28ffd0a85cee26e8549595fbc"}, - {file = "websockets-12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12743ab88ab2af1d17dd4acb4645677cb7063ef4db93abffbf164218a5d54c6b"}, - {file = "websockets-12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b645f491f3c48d3f8a00d1fce07445fab7347fec54a3e65f0725d730d5b99cb"}, - {file = "websockets-12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9893d1aa45a7f8b3bc4510f6ccf8db8c3b62120917af15e3de247f0780294b92"}, - {file = "websockets-12.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f38a7b376117ef7aff996e737583172bdf535932c9ca021746573bce40165ed"}, - {file = "websockets-12.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f764ba54e33daf20e167915edc443b6f88956f37fb606449b4a5b10ba42235a5"}, - {file = "websockets-12.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1e4b3f8ea6a9cfa8be8484c9221ec0257508e3a1ec43c36acdefb2a9c3b00aa2"}, - {file = "websockets-12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9fdf06fd06c32205a07e47328ab49c40fc1407cdec801d698a7c41167ea45113"}, - {file = "websockets-12.0-cp312-cp312-win32.whl", hash = "sha256:baa386875b70cbd81798fa9f71be689c1bf484f65fd6fb08d051a0ee4e79924d"}, - {file = "websockets-12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ae0a5da8f35a5be197f328d4727dbcfafa53d1824fac3d96cdd3a642fe09394f"}, - {file = "websockets-12.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5f6ffe2c6598f7f7207eef9a1228b6f5c818f9f4d53ee920aacd35cec8110438"}, - {file = "websockets-12.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9edf3fc590cc2ec20dc9d7a45108b5bbaf21c0d89f9fd3fd1685e223771dc0b2"}, - {file = "websockets-12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:8572132c7be52632201a35f5e08348137f658e5ffd21f51f94572ca6c05ea81d"}, - {file = "websockets-12.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:604428d1b87edbf02b233e2c207d7d528460fa978f9e391bd8aaf9c8311de137"}, - {file = "websockets-12.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1a9d160fd080c6285e202327aba140fc9a0d910b09e423afff4ae5cbbf1c7205"}, - {file = "websockets-12.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87b4aafed34653e465eb77b7c93ef058516cb5acf3eb21e42f33928616172def"}, - {file = "websockets-12.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b2ee7288b85959797970114deae81ab41b731f19ebcd3bd499ae9ca0e3f1d2c8"}, - {file = "websockets-12.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7fa3d25e81bfe6a89718e9791128398a50dec6d57faf23770787ff441d851967"}, - {file = "websockets-12.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a571f035a47212288e3b3519944f6bf4ac7bc7553243e41eac50dd48552b6df7"}, - {file = "websockets-12.0-cp38-cp38-win32.whl", hash = "sha256:3c6cc1360c10c17463aadd29dd3af332d4a1adaa8796f6b0e9f9df1fdb0bad62"}, - {file = "websockets-12.0-cp38-cp38-win_amd64.whl", hash = "sha256:1bf386089178ea69d720f8db6199a0504a406209a0fc23e603b27b300fdd6892"}, - {file = "websockets-12.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ab3d732ad50a4fbd04a4490ef08acd0517b6ae6b77eb967251f4c263011a990d"}, - {file = "websockets-12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1d9697f3337a89691e3bd8dc56dea45a6f6d975f92e7d5f773bc715c15dde28"}, - {file = "websockets-12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1df2fbd2c8a98d38a66f5238484405b8d1d16f929bb7a33ed73e4801222a6f53"}, - {file = "websockets-12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23509452b3bc38e3a057382c2e941d5ac2e01e251acce7adc74011d7d8de434c"}, - {file = "websockets-12.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e5fc14ec6ea568200ea4ef46545073da81900a2b67b3e666f04adf53ad452ec"}, - {file = "websockets-12.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46e71dbbd12850224243f5d2aeec90f0aaa0f2dde5aeeb8fc8df21e04d99eff9"}, - {file = "websockets-12.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b81f90dcc6c85a9b7f29873beb56c94c85d6f0dac2ea8b60d995bd18bf3e2aae"}, - {file = "websockets-12.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a02413bc474feda2849c59ed2dfb2cddb4cd3d2f03a2fedec51d6e959d9b608b"}, - {file = "websockets-12.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bbe6013f9f791944ed31ca08b077e26249309639313fff132bfbf3ba105673b9"}, - {file = "websockets-12.0-cp39-cp39-win32.whl", hash = "sha256:cbe83a6bbdf207ff0541de01e11904827540aa069293696dd528a6640bd6a5f6"}, - {file = "websockets-12.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc4e7fa5414512b481a2483775a8e8be7803a35b30ca805afa4998a84f9fd9e8"}, - {file = "websockets-12.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:248d8e2446e13c1d4326e0a6a4e9629cb13a11195051a73acf414812700badbd"}, - {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44069528d45a933997a6fef143030d8ca8042f0dfaad753e2906398290e2870"}, - {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:c4e37d36f0d19f0a4413d3e18c0d03d0c268ada2061868c1e6f5ab1a6d575077"}, - {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d829f975fc2e527a3ef2f9c8f25e553eb7bc779c6665e8e1d52aa22800bb38b"}, - {file = "websockets-12.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2c71bd45a777433dd9113847af751aae36e448bc6b8c361a566cb043eda6ec30"}, - {file = "websockets-12.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0bee75f400895aef54157b36ed6d3b308fcab62e5260703add87f44cee9c82a6"}, - {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:423fc1ed29f7512fceb727e2d2aecb952c46aa34895e9ed96071821309951123"}, - {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27a5e9964ef509016759f2ef3f2c1e13f403725a5e6a1775555994966a66e931"}, - {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3181df4583c4d3994d31fb235dc681d2aaad744fbdbf94c4802485ececdecf2"}, - {file = "websockets-12.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b067cb952ce8bf40115f6c19f478dc71c5e719b7fbaa511359795dfd9d1a6468"}, - {file = "websockets-12.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:00700340c6c7ab788f176d118775202aadea7602c5cc6be6ae127761c16d6b0b"}, - {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e469d01137942849cff40517c97a30a93ae79917752b34029f0ec72df6b46399"}, - {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffefa1374cd508d633646d51a8e9277763a9b78ae71324183693959cf94635a7"}, - {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba0cab91b3956dfa9f512147860783a1829a8d905ee218a9837c18f683239611"}, - {file = "websockets-12.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2cb388a5bfb56df4d9a406783b7f9dbefb888c09b71629351cc6b036e9259370"}, - {file = "websockets-12.0-py3-none-any.whl", hash = "sha256:dc284bbc8d7c78a6c69e0c7325ab46ee5e40bb4d50e494d8131a07ef47500e9e"}, - {file = "websockets-12.0.tar.gz", hash = "sha256:81df9cbcbb6c260de1e007e58c011bfebe2dafc8435107b0537f393dd38c8b1b"}, + {file = "websockets-13.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ad4fa707ff9e2ffee019e946257b5300a45137a58f41fbd9a4db8e684ab61528"}, + {file = "websockets-13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6fd757f313c13c34dae9f126d3ba4cf97175859c719e57c6a614b781c86b617e"}, + {file = "websockets-13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cbac2eb7ce0fac755fb983c9247c4a60c4019bcde4c0e4d167aeb17520cc7ef1"}, + {file = "websockets-13.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4b83cf7354cbbc058e97b3e545dceb75b8d9cf17fd5a19db419c319ddbaaf7a"}, + {file = "websockets-13.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9202c0010c78fad1041e1c5285232b6508d3633f92825687549540a70e9e5901"}, + {file = "websockets-13.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e6566e79c8c7cbea75ec450f6e1828945fc5c9a4769ceb1c7b6e22470539712"}, + {file = 
"websockets-13.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e7fcad070dcd9ad37a09d89a4cbc2a5e3e45080b88977c0da87b3090f9f55ead"}, + {file = "websockets-13.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0a8f7d65358a25172db00c69bcc7df834155ee24229f560d035758fd6613111a"}, + {file = "websockets-13.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:63b702fb31e3f058f946ccdfa551f4d57a06f7729c369e8815eb18643099db37"}, + {file = "websockets-13.0-cp310-cp310-win32.whl", hash = "sha256:3a20cf14ba7b482c4a1924b5e061729afb89c890ca9ed44ac4127c6c5986e424"}, + {file = "websockets-13.0-cp310-cp310-win_amd64.whl", hash = "sha256:587245f0704d0bb675f919898d7473e8827a6d578e5a122a21756ca44b811ec8"}, + {file = "websockets-13.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:06df8306c241c235075d2ae77367038e701e53bc8c1bb4f6644f4f53aa6dedd0"}, + {file = "websockets-13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:85a1f92a02f0b8c1bf02699731a70a8a74402bb3f82bee36e7768b19a8ed9709"}, + {file = "websockets-13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9ed02c604349068d46d87ef4c2012c112c791f2bec08671903a6bb2bd9c06784"}, + {file = "websockets-13.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b89849171b590107f6724a7b0790736daead40926ddf47eadf998b4ff51d6414"}, + {file = "websockets-13.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:939a16849d71203628157a5e4a495da63967c744e1e32018e9b9e2689aca64d4"}, + {file = "websockets-13.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad818cdac37c0ad4c58e51cb4964eae4f18b43c4a83cb37170b0d90c31bd80cf"}, + {file = "websockets-13.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cbfe82a07596a044de78bb7a62519e71690c5812c26c5f1d4b877e64e4f46309"}, + {file = "websockets-13.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e07e76c49f39c5b45cbd7362b94f001ae209a3ea4905ae9a09cfd53b3c76373d"}, + {file = "websockets-13.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:372f46a0096cfda23c88f7e42349a33f8375e10912f712e6b496d3a9a557290f"}, + {file = "websockets-13.0-cp311-cp311-win32.whl", hash = "sha256:376a43a4fd96725f13450d3d2e98f4f36c3525c562ab53d9a98dd2950dca9a8a"}, + {file = "websockets-13.0-cp311-cp311-win_amd64.whl", hash = "sha256:2be1382a4daa61e2f3e2be3b3c86932a8db9d1f85297feb6e9df22f391f94452"}, + {file = "websockets-13.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b5407c34776b9b77bd89a5f95eb0a34aaf91889e3f911c63f13035220eb50107"}, + {file = "websockets-13.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4782ec789f059f888c1e8fdf94383d0e64b531cffebbf26dd55afd53ab487ca4"}, + {file = "websockets-13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c8feb8e19ef65c9994e652c5b0324abd657bedd0abeb946fb4f5163012c1e730"}, + {file = "websockets-13.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3f3d2e20c442b58dbac593cb1e02bc02d149a86056cc4126d977ad902472e3b"}, + {file = "websockets-13.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e39d393e0ab5b8bd01717cc26f2922026050188947ff54fe6a49dc489f7750b7"}, + {file = "websockets-13.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f661a4205741bdc88ac9c2b2ec003c72cee97e4acd156eb733662ff004ba429"}, + {file = "websockets-13.0-cp312-cp312-musllinux_1_2_aarch64.whl", 
hash = "sha256:384129ad0490e06bab2b98c1da9b488acb35bb11e2464c728376c6f55f0d45f3"}, + {file = "websockets-13.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:df5c0eff91f61b8205a6c9f7b255ff390cdb77b61c7b41f79ca10afcbb22b6cb"}, + {file = "websockets-13.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:02cc9bb1a887dac0e08bf657c5d00aa3fac0d03215d35a599130c2034ae6663a"}, + {file = "websockets-13.0-cp312-cp312-win32.whl", hash = "sha256:d9726d2c9bd6aed8cb994d89b3910ca0079406edce3670886ec828a73e7bdd53"}, + {file = "websockets-13.0-cp312-cp312-win_amd64.whl", hash = "sha256:fa0839f35322f7b038d8adcf679e2698c3a483688cc92e3bd15ee4fb06669e9a"}, + {file = "websockets-13.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:da7e501e59857e8e3e9d10586139dc196b80445a591451ca9998aafba1af5278"}, + {file = "websockets-13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a00e1e587c655749afb5b135d8d3edcfe84ec6db864201e40a882e64168610b3"}, + {file = "websockets-13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a7fbf2a8fe7556a8f4e68cb3e736884af7bf93653e79f6219f17ebb75e97d8f0"}, + {file = "websockets-13.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ea9c9c7443a97ea4d84d3e4d42d0e8c4235834edae652993abcd2aff94affd7"}, + {file = "websockets-13.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:35c2221b539b360203f3f9ad168e527bf16d903e385068ae842c186efb13d0ea"}, + {file = "websockets-13.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:358d37c5c431dd050ffb06b4b075505aae3f4f795d7fff9794e5ed96ce99b998"}, + {file = "websockets-13.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:038e7a0f1bfafc7bf52915ab3506b7a03d1e06381e9f60440c856e8918138151"}, + {file = "websockets-13.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:fd038bc9e2c134847f1e0ce3191797fad110756e690c2fdd9702ed34e7a43abb"}, + {file = "websockets-13.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:93b8c2008f372379fb6e5d2b3f7c9ec32f7b80316543fd3a5ace6610c5cde1b0"}, + {file = "websockets-13.0-cp313-cp313-win32.whl", hash = "sha256:851fd0afb3bc0b73f7c5b5858975d42769a5fdde5314f4ef2c106aec63100687"}, + {file = "websockets-13.0-cp313-cp313-win_amd64.whl", hash = "sha256:7d14901fdcf212804970c30ab9ee8f3f0212e620c7ea93079d6534863444fb4e"}, + {file = "websockets-13.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ae7a519a56a714f64c3445cabde9fc2fc927e7eae44f413eae187cddd9e54178"}, + {file = "websockets-13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5575031472ca87302aeb2ce2c2349f4c6ea978c86a9d1289bc5d16058ad4c10a"}, + {file = "websockets-13.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9895df6cd0bfe79d09bcd1dbdc03862846f26fbd93797153de954306620c1d00"}, + {file = "websockets-13.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4de299c947a54fca9ce1c5fd4a08eb92ffce91961becb13bd9195f7c6e71b47"}, + {file = "websockets-13.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:05c25f7b849702950b6fd0e233989bb73a0d2bc83faa3b7233313ca395205f6d"}, + {file = "websockets-13.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ede95125a30602b1691a4b1da88946bf27dae283cf30f22cd2cb8ca4b2e0d119"}, + {file = "websockets-13.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = 
"sha256:addf0a16e4983280efed272d8cb3b2e05f0051755372461e7d966b80a6554e16"}, + {file = "websockets-13.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:06b3186e97bf9a33921fa60734d5ed90f2a9b407cce8d23c7333a0984049ef61"}, + {file = "websockets-13.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:eae368cac85adc4c7dc3b0d5f84ffcca609d658db6447387300478e44db70796"}, + {file = "websockets-13.0-cp38-cp38-win32.whl", hash = "sha256:337837ac788d955728b1ab01876d72b73da59819a3388e1c5e8e05c3999f1afa"}, + {file = "websockets-13.0-cp38-cp38-win_amd64.whl", hash = "sha256:f66e00e42f25ca7e91076366303e11c82572ca87cc5aae51e6e9c094f315ab41"}, + {file = "websockets-13.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:94c1c02721139fe9940b38d28fb15b4b782981d800d5f40f9966264fbf23dcc8"}, + {file = "websockets-13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bd4ba86513430513e2aa25a441bb538f6f83734dc368a2c5d18afdd39097aa33"}, + {file = "websockets-13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a1ab8f0e0cadc5be5f3f9fa11a663957fecbf483d434762c8dfb8aa44948944a"}, + {file = "websockets-13.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3670def5d3dfd5af6f6e2b3b243ea8f1f72d8da1ef927322f0703f85c90d9603"}, + {file = "websockets-13.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6058b6be92743358885ad6dcdecb378fde4a4c74d4dd16a089d07580c75a0e80"}, + {file = "websockets-13.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:516062a0a8ef5ecbfa4acbaec14b199fc070577834f9fe3d40800a99f92523ca"}, + {file = "websockets-13.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:da7e918d82e7bdfc6f66d31febe1b2e28a1ca3387315f918de26f5e367f61572"}, + {file = "websockets-13.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:9cc7f35dcb49a4e32db82a849fcc0714c4d4acc9d2273aded2d61f87d7f660b7"}, + {file = "websockets-13.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f5737c53eb2c8ed8f64b50d3dafd3c1dae739f78aa495a288421ac1b3de82717"}, + {file = "websockets-13.0-cp39-cp39-win32.whl", hash = "sha256:265e1f0d3f788ce8ef99dca591a1aec5263b26083ca0934467ad9a1d1181067c"}, + {file = "websockets-13.0-cp39-cp39-win_amd64.whl", hash = "sha256:4d70c89e3d3b347a7c4d3c33f8d323f0584c9ceb69b82c2ef8a174ca84ea3d4a"}, + {file = "websockets-13.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:602cbd010d8c21c8475f1798b705bb18567eb189c533ab5ef568bc3033fdf417"}, + {file = "websockets-13.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:bf8eb5dca4f484a60f5327b044e842e0d7f7cdbf02ea6dc4a4f811259f1f1f0b"}, + {file = "websockets-13.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89d795c1802d99a643bf689b277e8604c14b5af1bc0a31dade2cd7a678087212"}, + {file = "websockets-13.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:788bc841d250beccff67a20a5a53a15657a60111ef9c0c0a97fbdd614fae0fe2"}, + {file = "websockets-13.0-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7334752052532c156d28b8eaf3558137e115c7871ea82adff69b6d94a7bee273"}, + {file = "websockets-13.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e7a1963302947332c3039e3f66209ec73b1626f8a0191649e0713c391e9f5b0d"}, + {file = "websockets-13.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:2e1cf4e1eb84b4fd74a47688e8b0940c89a04ad9f6937afa43d468e71128cd68"}, + {file = "websockets-13.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:c026ee729c4ce55708a14b839ba35086dfae265fc12813b62d34ce33f4980c1c"}, + {file = "websockets-13.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5f9d23fbbf96eefde836d9692670bfc89e2d159f456d499c5efcf6a6281c1af"}, + {file = "websockets-13.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ad684cb7efce227d756bae3e8484f2e56aa128398753b54245efdfbd1108f2c"}, + {file = "websockets-13.0-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1e10b3fbed7be4a59831d3a939900e50fcd34d93716e433d4193a4d0d1d335d"}, + {file = "websockets-13.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d42a818e634f789350cd8fb413a3f5eec1cf0400a53d02062534c41519f5125c"}, + {file = "websockets-13.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e5ba5e9b332267d0f2c33ede390061850f1ac3ee6cd1bdcf4c5ea33ead971966"}, + {file = "websockets-13.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f9af457ed593e35f467140d8b61d425495b127744a9d65d45a366f8678449a23"}, + {file = "websockets-13.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bcea3eb58c09c3a31cc83b45c06d5907f02ddaf10920aaa6443975310f699b95"}, + {file = "websockets-13.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c210d1460dc8d326ffdef9703c2f83269b7539a1690ad11ae04162bc1878d33d"}, + {file = "websockets-13.0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b32f38bc81170fd56d0482d505b556e52bf9078b36819a8ba52624bd6667e39e"}, + {file = "websockets-13.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:81a11a1ddd5320429db47c04d35119c3e674d215173d87aaeb06ae80f6e9031f"}, + {file = "websockets-13.0-py3-none-any.whl", hash = "sha256:dbbac01e80aee253d44c4f098ab3cc17c822518519e869b284cfbb8cd16cc9de"}, + {file = "websockets-13.0.tar.gz", hash = "sha256:b7bf950234a482b7461afdb2ec99eee3548ec4d53f418c7990bb79c620476602"}, ] [[package]] @@ -3751,125 +2778,22 @@ files = [ {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, ] -[[package]] -name = "yarl" -version = "1.9.4" -description = "Yet another URL library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, - {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"}, - {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"}, - {file = 
"yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"}, - {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"}, - {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"}, - {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"}, - {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = 
"sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"}, - {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"}, - {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"}, - {file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash 
= "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"}, - {file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"}, - {file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"}, - {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"}, - {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"}, - {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"}, - {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"}, - {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"}, - {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"}, -] - -[package.dependencies] -idna = ">=2.0" -multidict = ">=4.0" - [[package]] name = "zipp" -version = "3.18.1" +version = "3.20.0" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"}, - {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"}, + {file = "zipp-3.20.0-py3-none-any.whl", hash = "sha256:58da6168be89f0be59beb194da1250516fdaa062ccebd30127ac65d30045e10d"}, + {file = "zipp-3.20.0.tar.gz", hash = "sha256:0145e43d89664cfe1a2e533adc75adafed82fe2da404b4bbb6b026c0157bdb31"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<4" -content-hash 
= "56f6921322b7a962216a1aae8aad9837104900ebaeca477fe24c5881e85af50b" +content-hash = "e45811e74878a9b652fef6ee06b10ad2d9e2cc33071bc8413bf2450aa17e47b7" diff --git a/libs/partners/chroma/pyproject.toml b/libs/partners/chroma/pyproject.toml index 485bfc5203488..593b57307d5c2 100644 --- a/libs/partners/chroma/pyproject.toml +++ b/libs/partners/chroma/pyproject.toml @@ -1,5 +1,5 @@ [build-system] -requires = [ "poetry-core>=1.0.0",] +requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" [tool.poetry] @@ -12,7 +12,7 @@ repository = "https://github.com/langchain-ai/langchain" license = "MIT" [tool.mypy] -disallow_untyped_defs = "True" +disallow_untyped_defs = true [tool.poetry.urls] "Source Code" = "https://github.com/langchain-ai/langchain/tree/master/libs/partners/chroma" @@ -30,21 +30,21 @@ version = "^1.26.0" python = ">=3.12" [tool.ruff.lint] -select = [ "E", "F", "I", "T201", "D",] +select = ["E", "F", "I", "T201", "D"] [tool.coverage.run] -omit = [ "tests/*",] +omit = ["tests/*"] [tool.pytest.ini_options] addopts = " --strict-markers --strict-config --durations=5" -markers = [ "requires: mark tests as requiring a specific library", "asyncio: mark tests as requiring asyncio", "compile: mark placeholder test used to compile integration tests without running them",] +markers = [ + "requires: mark tests as requiring a specific library", + "asyncio: mark tests as requiring asyncio", + "compile: mark placeholder test used to compile integration tests without running them", +] [tool.poetry.dependencies.chromadb] -version = ">=0.4.0,<0.6.0" - -[tool.poetry.dependencies.fastapi] -version = ">=0.95.2,<1" -optional = true +version = ">=0.4.0,<0.6.0,!=0.5.4,!=0.5.5" [tool.poetry.group.test] optional = true @@ -55,6 +55,8 @@ optional = true [tool.poetry.group.test_integration] optional = true +[tool.poetry.group.test_integration.dependencies] + [tool.poetry.group.lint] optional = true @@ -65,7 +67,7 @@ optional = true convention = "google" [tool.ruff.lint.per-file-ignores] -"tests/**" = [ "D",] +"tests/**" = ["D"] [tool.poetry.group.test.dependencies] pytest = "^7.3.0" @@ -89,34 +91,10 @@ types-requests = "^2.31.0.20240406" path = "../../core" develop = true -[tool.poetry.group.test.dependencies.langchain-community] -path = "../../community" -develop = true - -[tool.poetry.group.test_integration.dependencies.langchain-openai] -path = "../openai" -develop = true - [tool.poetry.group.dev.dependencies.langchain-core] path = "../../core" develop = true -[tool.poetry.group.dev.dependencies.langchain-community] -path = "../../community" -develop = true - [tool.poetry.group.typing.dependencies.langchain-core] path = "../../core" develop = true - -[tool.poetry.group.typing.dependencies.langchain-community] -path = "../../community" -develop = true - -[tool.poetry.group.typing.dependencies.langchain-text-splitters] -path = "../../text-splitters" -develop = true - -[tool.poetry.group.typing.dependencies.langchain] -path = "../../langchain" -develop = true diff --git a/libs/partners/chroma/tests/integration_tests/test_vectorstores.py b/libs/partners/chroma/tests/integration_tests/test_vectorstores.py index 8764a06e156c1..921ed22c6e16c 100644 --- a/libs/partners/chroma/tests/integration_tests/test_vectorstores.py +++ b/libs/partners/chroma/tests/integration_tests/test_vectorstores.py @@ -346,7 +346,7 @@ def test_chroma_large_batch() -> None: "my_collection", embedding_function=embedding_function.embed_documents, # type: ignore ) - docs = ["This is a test document"] * (client.max_batch_size + 
100) + docs = ["This is a test document"] * (client.max_batch_size + 100) # type: ignore db = Chroma.from_texts( client=client, collection_name=col.name, @@ -374,7 +374,7 @@ def test_chroma_large_batch_update() -> None: "my_collection", embedding_function=embedding_function.embed_documents, # type: ignore ) - docs = ["This is a test document"] * (client.max_batch_size + 100) + docs = ["This is a test document"] * (client.max_batch_size + 100) # type: ignore ids = [str(uuid.uuid4()) for _ in range(len(docs))] db = Chroma.from_texts( client=client, From f878df404fdfd53ab72b91cfa3b2dc5497eaec06 Mon Sep 17 00:00:00 2001 From: Erick Friis Date: Tue, 20 Aug 2024 16:24:32 -0700 Subject: [PATCH 30/80] partners/chroma: release 0.1.3 (#25599) --- libs/partners/chroma/pyproject.toml | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/libs/partners/chroma/pyproject.toml b/libs/partners/chroma/pyproject.toml index 593b57307d5c2..91ba52d02edc4 100644 --- a/libs/partners/chroma/pyproject.toml +++ b/libs/partners/chroma/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "langchain-chroma" -version = "0.1.2" +version = "0.1.3" description = "An integration package connecting Chroma and LangChain" authors = [] readme = "README.md" @@ -77,24 +77,30 @@ syrupy = "^4.0.2" pytest-watcher = "^0.3.4" pytest-asyncio = "^0.21.1" + [tool.poetry.group.codespell.dependencies] codespell = "^2.2.0" + [tool.poetry.group.lint.dependencies] ruff = "^0.5" + [tool.poetry.group.typing.dependencies] mypy = "^1.10" types-requests = "^2.31.0.20240406" + [tool.poetry.group.test.dependencies.langchain-core] path = "../../core" develop = true + [tool.poetry.group.dev.dependencies.langchain-core] path = "../../core" develop = true + [tool.poetry.group.typing.dependencies.langchain-core] path = "../../core" develop = true From be27e1787fbd14979631fe3fc5cdd76b156ce898 Mon Sep 17 00:00:00 2001 From: Bagatur <22008038+baskaryan@users.noreply.github.com> Date: Tue, 20 Aug 2024 17:39:50 -0700 Subject: [PATCH 31/80] docs: few-shot conceptual guide (#25596) Co-authored-by: ccurme Co-authored-by: jakerachleff --- docs/docs/concepts.mdx | 89 +++++++++++++++++++++++++++++++++++++++--- 1 file changed, 84 insertions(+), 5 deletions(-) diff --git a/docs/docs/concepts.mdx b/docs/docs/concepts.mdx index f9180272c3167..8ddeb85372608 100644 --- a/docs/docs/concepts.mdx +++ b/docs/docs/concepts.mdx @@ -209,7 +209,7 @@ Some language models take a list of messages as input and return a message. There are a few different types of messages. All messages have a `role`, `content`, and `response_metadata` property. -The `role` describes WHO is saying the message. +The `role` describes WHO is saying the message. The standard roles are "user", "assistant", "system", and "tool". LangChain has different message classes for different roles. The `content` property describes the content of the message. @@ -218,13 +218,16 @@ This can be a few different things: - A string (most models deal this type of content) - A List of dictionaries (this is used for multimodal input, where the dictionary contains information about that input type and that input location) +Optionally, messages can have a `name` property which allows for differentiating between multiple speakers with the same role. +For example, if there are two users in the chat history it can be useful to differentiate between them. Not all models support this. + #### HumanMessage -This represents a message from the user. 
+This represents a message with role "user". #### AIMessage -This represents a message from the model. In addition to the `content` property, these messages also have: +This represents a message with role "assistant". In addition to the `content` property, these messages also have: **`response_metadata`** @@ -244,11 +247,11 @@ This property returns a list of `ToolCall`s. A `ToolCall` is a dictionary with t #### SystemMessage -This represents a system message, which tells the model how to behave. Not every model provider supports this. +This represents a message with role "system", which tells the model how to behave. Not every model provider supports this. #### ToolMessage -This represents the result of a tool call. In addition to `role` and `content`, this message has: +This represents a message with role "tool", which contains the result of calling a tool. In addition to `role` and `content`, this message has: - a `tool_call_id` field which conveys the id of the call to the tool that was called to produce this result. - an `artifact` field which can be used to pass along arbitrary artifacts of the tool execution which are useful to track but which should not be sent to the model. @@ -343,6 +346,7 @@ For specifics on how to use prompt templates, see the [relevant how-to guides he ### Example selectors One common prompting technique for achieving better performance is to include examples as part of the prompt. +This is known as [few-shot prompting](/docs/concepts/#few-shot-prompting). This gives the language model concrete examples of how it should behave. Sometimes these examples are hardcoded into the prompt, but for more advanced situations it may be nice to dynamically select them. Example Selectors are classes responsible for selecting and then formatting examples into prompts. @@ -1101,6 +1105,81 @@ The following how-to guides are good practical resources for using function/tool For a full list of model providers that support tool calling, [see this table](/docs/integrations/chat/#advanced-features). +### Few-shot prompting + +One of the most effective ways to improve model performance is to give a model examples of what you want it to do. The technique of adding example inputs and expected outputs to a model prompt is known as "few-shot prompting". There are a few things to think about when doing few-shot prompting: + +1. How are examples generated? +2. How many examples are in each prompt? +3. How are examples selected at runtime? +4. How are examples formatted in the prompt? + +Here are the considerations for each. + +#### 1. Generating examples + +The first and most important step of few-shot prompting is coming up with a good dataset of examples. Good examples should be relevant at runtime, clear, informative, and provide information that was not already known to the model. + +At a high-level, the basic ways to generate examples are: +- Manual: a person/people generates examples they think are useful. +- Better model: a better (presumably more expensive/slower) model's responses are used as examples for a worse (presumably cheaper/faster) model. +- User feedback: users (or labelers) leave feedback on interactions with the application and examples are generated based on that feedback (for example, all interactions with positive feedback could be turned into examples). +- LLM feedback: same as user feedback but the process is automated by having models evaluate themselves. + +Which approach is best depends on your task. 
For tasks where a small number of core principles need to be understood really well, it can be valuable to hand-craft a few really good examples.
+For tasks where the space of correct behaviors is broader and more nuanced, it can be useful to generate many examples in a more automated fashion so that there's a higher likelihood of there being some highly relevant examples for any runtime input.
+
+**Single-turn vs. multi-turn examples**
+
+Another dimension to think about when generating examples is what the example is actually showing.
+
+The simplest types of examples just have a user input and an expected model output. These are single-turn examples.
+
+A more complex type of example is an entire conversation, usually one in which a model initially responds incorrectly and a user then tells the model how to correct its answer.
+This is called a multi-turn example. Multi-turn examples can be useful for more nuanced tasks where it's useful to show common errors and spell out exactly why they're wrong and what should be done instead.
+
+#### 2. Number of examples
+
+Once we have a dataset of examples, we need to think about how many examples should be in each prompt.
+The key tradeoff is that more examples generally improve performance, but larger prompts increase costs and latency.
+And beyond some threshold having too many examples can start to confuse the model.
+Finding the right number of examples is highly dependent on the model, the task, the quality of the examples, and your cost and latency constraints.
+Anecdotally, the better the model is, the fewer examples it needs to perform well and the more quickly you hit steeply diminishing returns on adding more examples.
+But the best/only way to reliably answer this question is to run some experiments with different numbers of examples.
+
+#### 3. Selecting examples
+
+Assuming we are not adding our entire example dataset into each prompt, we need to have a way of selecting examples from our dataset based on a given input. We can do this:
+- Randomly
+- By (semantic or keyword-based) similarity of the inputs
+- Based on some other constraints, like token size
+
+LangChain has a number of [`ExampleSelectors`](/docs/concepts/#example-selectors) which make it easy to use any of these techniques.
+
+Generally, selecting by semantic similarity leads to the best model performance. But how important this is again depends on the model and task, and is something worth experimenting with.
+
+#### 4. Formatting examples
+
+Most state-of-the-art models these days are chat models, so we'll focus on formatting examples for those. Our basic options are to insert the examples:
+- In the system prompt as a string
+- As their own messages
+
+If we insert our examples into the system prompt as a string, we'll need to make sure it's clear to the model where each example begins and which parts are the input versus output. Different models respond better to different syntaxes, like [ChatML](https://learn.microsoft.com/en-us/azure/ai-services/openai/how-to/chat-markup-language), XML, TypeScript, etc.
+
+If we insert our examples as messages, where each example is represented as a sequence of Human and AI messages, we might want to also assign [names](/docs/concepts/#messages) to our messages like `"example_user"` and `"example_assistant"` to make it clear that these messages correspond to different actors than the latest input message.
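+
+As a minimal sketch of the second option (the toy example data below is invented purely for illustration), `FewShotChatMessagePromptTemplate` from `langchain_core.prompts` can format a list of examples into alternating human/AI messages and splice them into a chat prompt:
+
+```python
+from langchain_core.prompts import ChatPromptTemplate, FewShotChatMessagePromptTemplate
+
+# Invented single-turn examples: each dict becomes one human/AI message pair.
+examples = [
+    {"input": "2+2", "output": "4"},
+    {"input": "2+3", "output": "5"},
+]
+
+# How a single example is rendered as messages.
+example_prompt = ChatPromptTemplate.from_messages(
+    [("human", "{input}"), ("ai", "{output}")]
+)
+
+# Formats every example into messages and inserts them into the prompt.
+few_shot_prompt = FewShotChatMessagePromptTemplate(
+    example_prompt=example_prompt,
+    examples=examples,
+)
+
+final_prompt = ChatPromptTemplate.from_messages(
+    [
+        ("system", "You are a helpful math tutor."),
+        few_shot_prompt,
+        ("human", "{input}"),
+    ]
+)
+
+# final_prompt.invoke({"input": "What is 3+3?"}) yields the system message,
+# the formatted example messages, and the latest human message.
+```
+
+If per-example speaker names like `"example_user"` and `"example_assistant"` are desired, the examples can instead be constructed as explicit `HumanMessage` and `AIMessage` objects with their `name` field set.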
+ +**Formatting tool call examples** + +One area where formatting examples as messages can be tricky is when our example outputs have tool calls. This is because different models have different constraints on what types of message sequences are allowed when any tool calls are generated. +- Some models require that any AIMessage with tool calls be immediately followed by ToolMessages for every tool call, +- Some models additionally require that any ToolMessages be immediately followed by an AIMessage before the next HumanMessage, +- Some models require that tools are passed in to the model if there are any tool calls / ToolMessages in the chat history. + +These requirements are model-specific and should be checked for the model you are using. If your model requires ToolMessages after tool calls and/or AIMessages after ToolMessages and your examples only include expected tool calls and not the actual tool outputs, you can try adding dummy ToolMessages / AIMessages to the end of each example with generic contents to satisfy the API constraints. +In these cases it's especially worth experimenting with inserting your examples as strings versus messages, as having dummy messages can adversely affect certain models. + +You can see a case study of how Anthropic and OpenAI respond to different few-shot prompting techniques on two different tool calling benchmarks [here](https://blog.langchain.dev/few-shot-prompting-to-improve-tool-calling-performance/). + ### Retrieval LLMs are trained on a large but fixed dataset, limiting their ability to reason over private or recent information. Fine-tuning an LLM with specific facts is one way to mitigate this, but is often [poorly suited for factual recall](https://www.anyscale.com/blog/fine-tuning-is-for-form-not-facts) and [can be costly](https://www.glean.com/blog/how-to-build-an-ai-assistant-for-the-enterprise). From 55fd2e21584d6ef269ff3bdd2f2b7bd49889ee3c Mon Sep 17 00:00:00 2001 From: Scott Hurrey Date: Tue, 20 Aug 2024 22:23:43 -0400 Subject: [PATCH 32/80] box: add langchain box package and DocumentLoader (#25506) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Thank you for contributing to LangChain! -Description: Adding new package: `langchain-box`: * `langchain_box.document_loaders.BoxLoader` — DocumentLoader functionality * `langchain_box.utilities.BoxAPIWrapper` — Box-specific code * `langchain_box.utilities.BoxAuth` — Helper class for Box authentication * `langchain_box.utilities.BoxAuthType` — enum used by BoxAuth class - Twitter handle: @boxplatform - [x] **Add tests and docs**: If you're adding a new integration, please include 1. a test for the integration, preferably unit tests that do not rely on network access, 2. an example notebook showing its use. It lives in `docs/docs/integrations` directory. - [x] **Lint and test**: Run `make format`, `make lint` and `make test` from the root of the package(s) you've modified. See contribution guidelines for more: https://python.langchain.com/docs/contributing/ Additional guidelines: - Make sure optional dependencies are imported within a function. - Please do not add dependencies to pyproject.toml files (even optional ones) unless they are required for unit tests. - Most PRs should not touch more than one package. - Changes should be backwards compatible. - If you are adding something to community, do not re-import it in langchain. If no one reviews your PR within a few days, please @-mention one of baskaryan, efriis, eyurtsev, ccurme, vbarda, hwchase17. 
--------- Co-authored-by: Erick Friis Co-authored-by: Erick Friis --- .../integrations/document_loaders/box.ipynb | 282 +++++ docs/docs/integrations/providers/box.mdx | 174 ++++ libs/partners/box/.gitignore | 1 + libs/partners/box/LICENSE | 21 + libs/partners/box/Makefile | 62 ++ libs/partners/box/README.md | 195 ++++ libs/partners/box/langchain_box/__init__.py | 19 + .../document_loaders/__init__.py | 5 + .../box/langchain_box/document_loaders/box.py | 218 ++++ libs/partners/box/langchain_box/py.typed | 0 .../box/langchain_box/utilities/__init__.py | 5 + .../box/langchain_box/utilities/box.py | 525 ++++++++++ libs/partners/box/poetry.lock | 984 ++++++++++++++++++ libs/partners/box/pyproject.toml | 92 ++ libs/partners/box/scripts/check_imports.py | 17 + libs/partners/box/scripts/check_pydantic.sh | 27 + libs/partners/box/scripts/lint_imports.sh | 18 + libs/partners/box/tests/__init__.py | 0 .../box/tests/integration_tests/__init__.py | 0 .../document_loaders/__init__.py | 0 .../document_loaders/test_box_file_loader.py | 42 + .../tests/integration_tests/test_compile.py | 7 + .../integration_tests/utilities/__init__.py | 0 .../utilities/test_box_util.py | 47 + .../partners/box/tests/unit_tests/__init__.py | 0 .../unit_tests/document_loaders/__init__.py | 0 .../document_loaders/test_box_loader.py | 58 ++ .../box/tests/unit_tests/test_imports.py | 13 + .../tests/unit_tests/utilities/__init__.py | 0 .../unit_tests/utilities/test_box_util.py | 101 ++ yarn.lock | 4 + 31 files changed, 2917 insertions(+) create mode 100644 docs/docs/integrations/document_loaders/box.ipynb create mode 100644 docs/docs/integrations/providers/box.mdx create mode 100644 libs/partners/box/.gitignore create mode 100644 libs/partners/box/LICENSE create mode 100644 libs/partners/box/Makefile create mode 100644 libs/partners/box/README.md create mode 100644 libs/partners/box/langchain_box/__init__.py create mode 100644 libs/partners/box/langchain_box/document_loaders/__init__.py create mode 100644 libs/partners/box/langchain_box/document_loaders/box.py create mode 100644 libs/partners/box/langchain_box/py.typed create mode 100644 libs/partners/box/langchain_box/utilities/__init__.py create mode 100644 libs/partners/box/langchain_box/utilities/box.py create mode 100644 libs/partners/box/poetry.lock create mode 100644 libs/partners/box/pyproject.toml create mode 100644 libs/partners/box/scripts/check_imports.py create mode 100755 libs/partners/box/scripts/check_pydantic.sh create mode 100755 libs/partners/box/scripts/lint_imports.sh create mode 100644 libs/partners/box/tests/__init__.py create mode 100644 libs/partners/box/tests/integration_tests/__init__.py create mode 100644 libs/partners/box/tests/integration_tests/document_loaders/__init__.py create mode 100644 libs/partners/box/tests/integration_tests/document_loaders/test_box_file_loader.py create mode 100644 libs/partners/box/tests/integration_tests/test_compile.py create mode 100644 libs/partners/box/tests/integration_tests/utilities/__init__.py create mode 100644 libs/partners/box/tests/integration_tests/utilities/test_box_util.py create mode 100644 libs/partners/box/tests/unit_tests/__init__.py create mode 100644 libs/partners/box/tests/unit_tests/document_loaders/__init__.py create mode 100644 libs/partners/box/tests/unit_tests/document_loaders/test_box_loader.py create mode 100644 libs/partners/box/tests/unit_tests/test_imports.py create mode 100644 libs/partners/box/tests/unit_tests/utilities/__init__.py create mode 100644 
libs/partners/box/tests/unit_tests/utilities/test_box_util.py create mode 100644 yarn.lock diff --git a/docs/docs/integrations/document_loaders/box.ipynb b/docs/docs/integrations/document_loaders/box.ipynb new file mode 100644 index 0000000000000..1b949ec467d26 --- /dev/null +++ b/docs/docs/integrations/document_loaders/box.ipynb @@ -0,0 +1,282 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "---\n", + "sidebar_label: Box\n", + "---" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# BoxLoader\n", + "\n", + "This notebook provides a quick overview for getting started with Box [document loader](/docs/integrations/document_loaders/). For detailed documentation of all BoxLoader features and configurations head to the [API reference](https://api.python.langchain.com/en/latest/document_loaders/langchain_community.document_loaders.langchain_box_loader.BoxLoader.html).\n", + "\n", + "\n", + "## Overview\n", + "\n", + "The `BoxLoader` class helps you get your unstructured content from Box in Langchain's `Document` format. You can do this with either a `List[str]` containing Box file IDs, or with a `str` containing a Box folder ID. \n", + "\n", + "You must provide either a `List[str]` containing Box file Ids, or a `str` containing a folder ID. If getting files from a folder with folder ID, you can also set a `Bool` to tell the loader to get all sub-folders in that folder, as well. \n", + "\n", + ":::info\n", + "A Box instance can contain Petabytes of files, and folders can contain millions of files. Be intentional when choosing what folders you choose to index. And we recommend never getting all files from folder 0 recursively. Folder ID 0 is your root folder.\n", + ":::\n", + "\n", + "Files without a text representation will be skipped.\n", + "\n", + "### Integration details\n", + "\n", + "| Class | Package | Local | Serializable | JS support|\n", + "| :--- | :--- | :---: | :---: | :---: |\n", + "| [BoxLoader](https://api.python.langchain.com/en/latest/document_loaders/langchain_box.document_loaders.langchain_boxloader.BoxLoader.html) | [langchain_box](https://api.python.langchain.com/en/latest/box_api_reference.html) | ✅ | ❌ | ❌ | \n", + "### Loader features\n", + "| Source | Document Lazy Loading | Async Support\n", + "| :---: | :---: | :---: | \n", + "| BoxLoader | ✅ | ❌ | \n", + "\n", + "## Setup\n", + "\n", + "In order to use the Box package, you will need a few things:\n", + "\n", + "* A Box account — If you are not a current Box customer or want to test outside of your production Box instance, you can use a [free developer account](https://account.box.com/signup/n/developer#ty9l3).\n", + "* [A Box app](https://developer.box.com/guides/getting-started/first-application/) — This is configured in the [developer console](https://account.box.com/developers/console), and for Box AI, must have the `Manage AI` scope enabled. Here you will also select your authentication method\n", + "* The app must be [enabled by the administrator](https://developer.box.com/guides/authorization/custom-app-approval/#manual-approval). For free developer accounts, this is whomever signed up for the account.\n", + "\n", + "### Credentials\n", + "\n", + "For these examples, we will use [token authentication](https://developer.box.com/guides/authentication/tokens/developer-tokens). This can be used with any [authentication method](https://developer.box.com/guides/authentication/). Just get the token with whatever methodology. 
If you want to learn more about how to use other authentication types with `langchain-box`, visit the [Box provider](/docs/integrations/providers/box) document.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Enter your Box Developer Token: ········\n" + ] + } + ], + "source": [ + "import getpass\n", + "import os\n", + "\n", + "box_developer_token = getpass.getpass(\"Enter your Box Developer Token: \")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "If you want to get automated tracing of your model calls you can also set your [LangSmith](https://docs.smith.langchain.com/) API key by uncommenting below:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# os.environ[\"LANGSMITH_API_KEY\"] = getpass.getpass(\"Enter your LangSmith API key: \")\n", + "# os.environ[\"LANGSMITH_TRACING\"] = \"true\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Installation\n", + "\n", + "Install **langchain_box**." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "%pip install -qU langchain_box" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Initialization\n", + "\n", + "### Load files\n", + "\n", + "If you wish to load files, you must provide the `List` of file ids at instantiation time. \n", + "\n", + "This requires 1 piece of information:\n", + "\n", + "* **box_file_ids** (`List[str]`)- A list of Box file IDs. " + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "from langchain_box.document_loaders import BoxLoader\n", + "\n", + "box_file_ids = [\"1514555423624\", \"1514553902288\"]\n", + "\n", + "loader = BoxLoader(\n", + " box_developer_token=box_developer_token,\n", + " box_file_ids=box_file_ids,\n", + " character_limit=10000, # Optional. Defaults to no limit\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Load from folder\n", + "\n", + "If you wish to load files from a folder, you must provide a `str` with the Box folder ID at instantiation time. \n", + "\n", + "This requires 1 piece of information:\n", + "\n", + "* **box_folder_id** (`str`)- A string containing a Box folder ID. " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from langchain_box.document_loaders import BoxLoader\n", + "\n", + "box_folder_id = \"260932470532\"\n", + "\n", + "loader = BoxLoader(\n", + " box_folder_id=box_folder_id,\n", + " recursive=False, # Optional. return entire tree, defaults to False\n", + " character_limit=10000, # Optional. 
Defaults to no limit\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Load" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "Document(metadata={'source': 'https://dl.boxcloud.com/api/2.0/internal_files/1514555423624/versions/1663171610024/representations/extracted_text/content/', 'title': 'Invoice-A5555_txt'}, page_content='Vendor: AstroTech Solutions\\nInvoice Number: A5555\\n\\nLine Items:\\n - Gravitational Wave Detector Kit: $800\\n - Exoplanet Terrarium: $120\\nTotal: $920')" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "docs = loader.load()\n", + "docs[0]" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{'source': 'https://dl.boxcloud.com/api/2.0/internal_files/1514555423624/versions/1663171610024/representations/extracted_text/content/', 'title': 'Invoice-A5555_txt'}\n" + ] + } + ], + "source": [ + "print(docs[0].metadata)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Lazy Load" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "page = []\n", + "for doc in loader.lazy_load():\n", + " page.append(doc)\n", + " if len(page) >= 10:\n", + " # do some paged operation, e.g.\n", + " # index.upsert(page)\n", + "\n", + " page = []" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## API reference\n", + "\n", + "For detailed documentation of all BoxLoader features and configurations head to the API reference: https://api.python.langchain.com/en/latest/document_loaders/langchain_box.document_loaders.langchain_box_loader.BoxLoader.html\n", + "\n", + "\n", + "## Help\n", + "\n", + "If you have questions, you can check out our [developer documentation](https://developer.box.com) or reach out to use in our [developer community](https://community.box.com)." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.6" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/docs/docs/integrations/providers/box.mdx b/docs/docs/integrations/providers/box.mdx new file mode 100644 index 0000000000000..52b2b5d360ed8 --- /dev/null +++ b/docs/docs/integrations/providers/box.mdx @@ -0,0 +1,174 @@ +# Box + +[Box](https://box.com) is the Intelligent Content Cloud, a single platform that enables +organizations to fuel collaboration, manage the entire content lifecycle, secure critical content, +and transform business workflows with enterprise AI. Founded in 2005, Box simplifies work for +leading global organizations, including AstraZeneca, JLL, Morgan Stanley, and Nationwide. + +In this package, we make available a number of ways to include Box content in your AI workflows. + +### Installation and setup + +```text +%pip install -U langchain-box + +``` + +# langchain-box + +This package contains the LangChain integration with Box. 
For more information about +Box, check out our [developer documentation](https://developer.box.com). + +## Pre-requisites + +In order to integrate with Box, you need a few things: + +* A Box instance — if you are not a current Box customer, sign up for a +[free dev account](https://account.box.com/signup/n/developer#ty9l3). +* A Box app — more on how to +[create an app](https://developer.box.com/guides/getting-started/first-application/) +* Your app approved in your Box instance — This is done by your admin. +The good news is if you are using a free developer account, you are the admin. +[Authorize your app](https://developer.box.com/guides/authorization/custom-app-approval/#manual-approval) + +## Installation + +```bash +pip install -U langchain-box +``` + +## Authentication + +The `box-langchain` package offers some flexibility to authentication. The +most basic authentication method is by using a developer token. This can be +found in the [Box developer console](https://account.box.com/developers/console) +on the configuration screen. This token is purposely short-lived (1 hour) and is +intended for development. With this token, you can add it to your environment as +`BOX_DEVELOPER_TOKEN`, you can pass it directly to the loader, or you can use the +`BoxAuth` authentication helper class. + +We will cover passing it directly to the loader in the section below. + +### BoxAuth helper class + +`BoxAuth` supports the following authentication methods: + +* Token — either a developer token or any token generated through the Box SDK +* JWT with a service account +* JWT with a specified user +* CCG with a service account +* CCG with a specified user + +:::note +If using JWT authentication, you will need to download the configuration from the Box +developer console after generating your public/private key pair. Place this file in your +application directory structure somewhere. You will use the path to this file when using +the `BoxAuth` helper class. +::: + +For more information, learn about how to +[set up a Box application](https://developer.box.com/guides/getting-started/first-application/), +and check out the +[Box authentication guide](https://developer.box.com/guides/authentication/select/) +for more about our different authentication options. + +Examples: + +**Token** + +```python +from langchain_box.document_loaders import BoxLoader +from langchain_box.utilities import BoxAuth, BoxAuthType + +auth = BoxAuth( + auth_type=BoxAuthType.TOKEN, + box_developer_token=box_developer_token +) + +loader = BoxLoader( + box_auth=auth, + ... +) +``` + +**JWT with a service account** + +```python +from langchain_box.document_loaders import BoxLoader +from langchain_box.utilities import BoxAuth, BoxAuthType + +auth = BoxAuth( + auth_type=BoxAuthType.JWT, + box_jwt_path=box_jwt_path +) + +loader = BoxLoader( + box_auth=auth, + ... +``` + +**JWT with a specified user** + +```python +from langchain_box.document_loaders import BoxLoader +from langchain_box.utilities import BoxAuth, BoxAuthType + +auth = BoxAuth( + auth_type=BoxAuthType.JWT, + box_jwt_path=box_jwt_path, + box_user_id=box_user_id +) + +loader = BoxLoader( + box_auth=auth, + ... +``` + +**CCG with a service account** + +```python +from langchain_box.document_loaders import BoxLoader +from langchain_box.utilities import BoxAuth, BoxAuthType + +auth = BoxAuth( + auth_type=BoxAuthType.CCG, + box_client_id=box_client_id, + box_client_secret=box_client_secret, + box_enterprise_id=box_enterprise_id +) + +loader = BoxLoader( + box_auth=auth, + ... 
+``` + +**CCG with a specified user** + +```python +from langchain_box.document_loaders import BoxLoader +from langchain_box.utilities import BoxAuth, BoxAuthType + +auth = BoxAuth( + auth_type=BoxAuthType.CCG, + box_client_id=box_client_id, + box_client_secret=box_client_secret, + box_user_id=box_user_id +) + +loader = BoxLoader( + box_auth=auth, + ... +``` + +If you wish to use OAuth2 with the authorization_code flow, please use `BoxAuthType.TOKEN` with the token you have acquired. + +## Document Loaders + +### BoxLoader + +[See usage example](/docs/integrations/document_loaders/box) + +```python +from langchain_box.document_loaders import BoxLoader + +``` diff --git a/libs/partners/box/.gitignore b/libs/partners/box/.gitignore new file mode 100644 index 0000000000000..bee8a64b79a99 --- /dev/null +++ b/libs/partners/box/.gitignore @@ -0,0 +1 @@ +__pycache__ diff --git a/libs/partners/box/LICENSE b/libs/partners/box/LICENSE new file mode 100644 index 0000000000000..fc0602feecdd6 --- /dev/null +++ b/libs/partners/box/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2024 LangChain, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/libs/partners/box/Makefile b/libs/partners/box/Makefile new file mode 100644 index 0000000000000..2ecf3f3a156bb --- /dev/null +++ b/libs/partners/box/Makefile @@ -0,0 +1,62 @@ +.PHONY: all format lint test tests integration_tests docker_tests help extended_tests + +# Default target executed when no arguments are given to make. +all: help + +# Define a variable for the test file path. +TEST_FILE ?= tests/unit_tests/ +integration_test integration_tests: TEST_FILE = tests/integration_tests/ + + +# unit tests are run with the --disable-socket flag to prevent network calls +test tests: + poetry run pytest --disable-socket --allow-unix-socket $(TEST_FILE) + +# integration tests are run without the --disable-socket flag to allow network calls +integration_test integration_tests: + poetry run pytest $(TEST_FILE) + +###################### +# LINTING AND FORMATTING +###################### + +# Define a variable for Python and notebook files. +PYTHON_FILES=. +MYPY_CACHE=.mypy_cache +lint format: PYTHON_FILES=. +lint_diff format_diff: PYTHON_FILES=$(shell git diff --relative=libs/partners/box --name-only --diff-filter=d master | grep -E '\.py$$|\.ipynb$$') +lint_package: PYTHON_FILES=langchain_box +lint_tests: PYTHON_FILES=tests +lint_tests: MYPY_CACHE=.mypy_cache_test + +lint lint_diff lint_package lint_tests: + poetry run ruff . 
+ poetry run ruff format $(PYTHON_FILES) --diff + poetry run ruff --select I $(PYTHON_FILES) + mkdir -p $(MYPY_CACHE); poetry run mypy $(PYTHON_FILES) --cache-dir $(MYPY_CACHE) + +format format_diff: + poetry run ruff format $(PYTHON_FILES) + poetry run ruff --select I --fix $(PYTHON_FILES) + +spell_check: + poetry run codespell --toml pyproject.toml + +spell_fix: + poetry run codespell --toml pyproject.toml -w + +check_imports: $(shell find langchain_box -name '*.py') + poetry run python ./scripts/check_imports.py $^ + +###################### +# HELP +###################### + +help: + @echo '----' + @echo 'check_imports - check imports' + @echo 'format - run code formatters' + @echo 'lint - run linters' + @echo 'test - run unit tests' + @echo 'tests - run unit tests' + @echo 'test TEST_FILE= - run all tests in file' diff --git a/libs/partners/box/README.md b/libs/partners/box/README.md new file mode 100644 index 0000000000000..9c091725733d6 --- /dev/null +++ b/libs/partners/box/README.md @@ -0,0 +1,195 @@ +# langchain-box + +This package contains the LangChain integration with Box. For more information about +Box, check out our [developer documentation](https://developer.box.com). + +## Pre-requisites + +In order to integrate with Box, you need a few things: + +* A Box instance — if you are not a current Box customer, sign up for a +[free dev account](https://account.box.com/signup/n/developer#ty9l3). +* A Box app — more on how to +[create an app](https://developer.box.com/guides/getting-started/first-application/) +* Your app approved in your Box instance — This is done by your admin. +The good news is if you are using a free developer account, you are the admin. +[Authorize your app](https://developer.box.com/guides/authorization/custom-app-approval/#manual-approval) + +## Installation + +```bash +pip install -U langchain-box +``` + +## Authentication + +The `box-langchain` package offers some flexibility to authentication. The +most basic authentication method is by using a developer token. This can be +found in the [Box developer console](https://account.box.com/developers/console) +on the configuration screen. This token is purposely short-lived (1 hour) and is +intended for development. With this token, you can add it to your environment as +`BOX_DEVELOPER_TOKEN`, you can pass it directly to the loader, or you can use the +`BoxAuth` authentication helper class. + +We will cover passing it directly to the loader in the section below. + +### BoxAuth helper class + +`BoxAuth` supports the following authentication methods: + +* Token — either a developer token or any token generated through the Box SDK +* JWT with a service account +* JWT with a specified user +* CCG with a service account +* CCG with a specified user + +> [!NOTE] +> If using JWT authentication, you will need to download the configuration from the Box +> developer console after generating your public/private key pair. Place this file in your +> application directory structure somewhere. You will use the path to this file when using +> the `BoxAuth` helper class. + +For more information, learn about how to +[set up a Box application](https://developer.box.com/guides/getting-started/first-application/), +and check out the +[Box authentication guide](https://developer.box.com/guides/authentication/select/) +for more about our different authentication options. 
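+
+Before the per-method examples, here is a complete minimal sketch of the developer-token
+path. It assumes you have already exported `BOX_DEVELOPER_TOKEN` in your environment;
+the file ID shown is a placeholder, so substitute one from your own Box instance.
+
+```python
+import os
+
+from langchain_box.document_loaders import BoxLoader
+from langchain_box.utilities import BoxAuth, BoxAuthType
+
+# Assumes you have exported the token first, e.g.
+#   export BOX_DEVELOPER_TOKEN="<your developer token>"
+auth = BoxAuth(
+    auth_type=BoxAuthType.TOKEN,
+    box_developer_token=os.environ["BOX_DEVELOPER_TOKEN"],
+)
+
+loader = BoxLoader(
+    box_auth=auth,
+    box_file_ids=["<a Box file ID>"],  # placeholder ID
+)
+
+docs = loader.load()
+```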
+ +Examples: + +**Token** + +```python +from langchain_box.document_loaders import BoxLoader +from langchain_box.utilities import BoxAuth, BoxAuthType + +auth = BoxAuth( + auth_type=BoxAuthType.TOKEN, + box_developer_token=box_developer_token +) + +loader = BoxLoader( + box_auth=auth, + ... +) +``` + +**JWT with a service account** + +```python +from langchain_box.document_loaders import BoxLoader +from langchain_box.utilities import BoxAuth, BoxAuthType + +auth = BoxAuth( + auth_type=BoxAuthType.JWT, + box_jwt_path=box_jwt_path +) + +loader = BoxLoader( + box_auth=auth, + ... +``` + +**JWT with a specified user** + +```python +from langchain_box.document_loaders import BoxLoader +from langchain_box.utilities import BoxAuth, BoxAuthType + +auth = BoxAuth( + auth_type=BoxAuthType.JWT, + box_jwt_path=box_jwt_path, + box_user_id=box_user_id +) + +loader = BoxLoader( + box_auth=auth, + ... +``` + +**CCG with a service account** + +```python +from langchain_box.document_loaders import BoxLoader +from langchain_box.utilities import BoxAuth, BoxAuthType + +auth = BoxAuth( + auth_type=BoxAuthType.CCG, + box_client_id=box_client_id, + box_client_secret=box_client_secret, + box_enterprise_id=box_enterprise_id +) + +loader = BoxLoader( + box_auth=auth, + ... +``` + +**CCG with a specified user** + +```python +from langchain_box.document_loaders import BoxLoader +from langchain_box.utilities import BoxAuth, BoxAuthType + +auth = BoxAuth( + auth_type=BoxAuthType.CCG, + box_client_id=box_client_id, + box_client_secret=box_client_secret, + box_user_id=box_user_id +) + +loader = BoxLoader( + box_auth=auth, + ... +``` + +## Document Loaders + +The `BoxLoader` class helps you get your unstructured content from Box +in Langchain's `Document` format. You can do this with either a `List[str]` +containing Box file IDs, or with a `str` containing a Box folder ID. + +If getting files from a folder with folder ID, you can also set a `Bool` to +tell the loader to get all sub-folders in that folder, as well. + +:::info +A Box instance can contain Petabytes of files, and folders can contain millions +of files. Be intentional when choosing what folders you choose to index. And we +recommend never getting all files from folder 0 recursively. Folder ID 0 is your +root folder. +::: + +### Load files + +```python +import os + +from langchain_box.document_loaders import BoxLoader + +os.environ["BOX_DEVELOPER_TOKEN"] = "df21df2df21df2d1f21df2df1" + +loader = BoxLoader( + box_file_ids=["12345", "67890"], + character_limit=10000 # Optional. Defaults to no limit +) + +docs = loader.lazy_load() +``` + +### Load from folder + +```python +import os + +from langchain_box.document_loaders import BoxLoader + +os.environ["BOX_DEVELOPER_TOKEN"] = "df21df2df21df2d1f21df2df1" + +loader = BoxLoader( + box_folder_id="12345", + recursive=False, # Optional. return entire tree, defaults to False + character_limit=10000 # Optional. 
Defaults to no limit +) + +docs = loader.lazy_load() +``` \ No newline at end of file diff --git a/libs/partners/box/langchain_box/__init__.py b/libs/partners/box/langchain_box/__init__.py new file mode 100644 index 0000000000000..9959317438b08 --- /dev/null +++ b/libs/partners/box/langchain_box/__init__.py @@ -0,0 +1,19 @@ +from importlib import metadata + +from langchain_box.document_loaders import BoxLoader +from langchain_box.utilities import BoxAPIWrapper, BoxAuth, BoxAuthType + +try: + __version__ = metadata.version(__package__) +except metadata.PackageNotFoundError: + # Case where package metadata is not available. + __version__ = "" +del metadata # optional, avoids polluting the results of dir(__package__) + +__all__ = [ + "BoxLoader", + "BoxAuth", + "BoxAuthType", + "BoxAPIWrapper", + "__version__", +] diff --git a/libs/partners/box/langchain_box/document_loaders/__init__.py b/libs/partners/box/langchain_box/document_loaders/__init__.py new file mode 100644 index 0000000000000..91a4ca3b885ac --- /dev/null +++ b/libs/partners/box/langchain_box/document_loaders/__init__.py @@ -0,0 +1,5 @@ +"""Box Document Loaders.""" + +from langchain_box.document_loaders.box import BoxLoader + +__all__ = ["BoxLoader"] diff --git a/libs/partners/box/langchain_box/document_loaders/box.py b/libs/partners/box/langchain_box/document_loaders/box.py new file mode 100644 index 0000000000000..05b2daf717766 --- /dev/null +++ b/libs/partners/box/langchain_box/document_loaders/box.py @@ -0,0 +1,218 @@ +from typing import Any, Dict, Iterator, List, Optional + +from box_sdk_gen import FileBaseTypeField # type: ignore +from langchain_core.document_loaders.base import BaseLoader +from langchain_core.documents import Document +from langchain_core.pydantic_v1 import BaseModel, ConfigDict, root_validator + +from langchain_box.utilities import BoxAPIWrapper, BoxAuth + + +class BoxLoader(BaseLoader, BaseModel): + """ + BoxLoader + + This class will help you load files from your Box instance. You must have a + Box account. If you need one, you can sign up for a free developer account. + You will also need a Box application created in the developer portal, where + you can select your authorization type. + + If you wish to use either of the Box AI options, you must be on an Enterprise + Plus plan or above. The free developer account does not have access to Box AI. + + In addition, using the Box AI API requires a few prerequisite steps: + * Your administrator must enable the Box AI API + * You must enable the `Manage AI` scope in your app in the developer console. + * Your administratormust install and enable your application. + + Setup: + Install ``langchain-box`` and set environment variable ``BOX_DEVELOPER_TOKEN``. + + .. code-block:: bash + + pip install -U langchain-box + export BOX_DEVELOPER_TOKEN="your-api-key" + + This loader returns ``Document `` objects built from text representations of files + in Box. It will skip any document without a text representation available. You can + provide either a ``List[str]`` containing Box file IDS, or you can provide a + ``str`` contining a Box folder ID. If providing a folder ID, you can also enable + recursive mode to get the full tree under that folder. + + :::info + A Box instance can contain Petabytes of files, and folders can contain millions + of files. Be intentional when choosing what folders you choose to index. And we + recommend never getting all files from folder 0 recursively. Folder ID 0 is your + root folder. 
+ ::: + + Instantiate: + + Initialization variables + variable | description | type | required + ---+---+--- + box_developer_token | token to use for auth. | string | no + box_auth | client id for you app. Used for CCG | string | no + box_file_ids | Array of Box file Ids to retrieve | array of strings | no + box_folder_id | Box folder id to retrieve | string | no + recursive | whether to return subfolders, default False | bool | no + + Get files — this method requires you pass the ``box_file_ids`` parameter. This is a + ``List[str]`` containing the file IDs you wish to index. + + .. code-block:: python + + from langchain_box.document_loaders import BoxLoader + + box_file_ids = ["1514555423624", "1514553902288"] + + loader = BoxLoader( + box_file_ids=box_file_ids, + character_limit=10000 # Optional. Defaults to no limit + ) + + Get files in a folder — this method requires you pass the ``box_folder_id`` + parameter. This is a ``str`` containing the folder ID you wish to index. + + .. code-block:: python + + from langchain_box.document_loaders import BoxLoader + + box_folder_id = "260932470532" + + loader = BoxLoader( + box_folder_id=box_folder_id, + recursive=False # Optional. return entire tree, defaults to False + ) + + Load: + .. code-block:: python + + docs = loader.load() + docs[0] + + .. code-block:: python + + Document(metadata={'source': 'https://dl.boxcloud.com/api/2.0/ + internal_files/1514555423624/versions/1663171610024/representations + /extracted_text/content/', 'title': 'Invoice-A5555_txt'}, + page_content='Vendor: AstroTech Solutions\nInvoice Number: A5555\n\nLine + Items:\n - Gravitational Wave Detector Kit: $800\n - Exoplanet + Terrarium: $120\nTotal: $920') + + Lazy load: + .. code-block:: python + + docs = [] + docs_lazy = loader.lazy_load() + + for doc in docs_lazy: + docs.append(doc) + print(docs[0].page_content[:100]) + print(docs[0].metadata) + + .. code-block:: python + + Document(metadata={'source': 'https://dl.boxcloud.com/api/2.0/ + internal_files/1514555423624/versions/1663171610024/representations + /extracted_text/content/', 'title': 'Invoice-A5555_txt'}, + page_content='Vendor: AstroTech Solutions\nInvoice Number: A5555\n\nLine + Items:\n - Gravitational Wave Detector Kit: $800\n - Exoplanet + Terrarium: $120\nTotal: $920') + """ + + model_config = ConfigDict(use_enum_values=True) + + """String containing the Box Developer Token generated in the developer console""" + box_developer_token: Optional[str] = None + """Configured langchain_box.utilities.BoxAuth object""" + box_auth: Optional[BoxAuth] = None + """List[str] containing Box file ids""" + box_file_ids: Optional[List[str]] = None + """String containing box folder id to load files from""" + box_folder_id: Optional[str] = None + """If getting files by folder id, recursive is a bool to determine if you wish + to traverse subfolders to return child documents. 
Default is False""" + recursive: Optional[bool] = False + """character_limit is an int that caps the number of characters to + return per document.""" + character_limit: Optional[int] = -1 + + box: Optional[BoxAPIWrapper] + + class Config: + arbitrary_types_allowed = True + extra = "allow" + + @root_validator(allow_reuse=True) + def validate_box_loader_inputs(cls, values: Dict[str, Any]) -> Dict[str, Any]: + box = None + + """Validate that has either box_file_ids or box_folder_id.""" + if not values.get("box_file_ids") and not values.get("box_folder_id"): + raise ValueError("You must provide box_file_ids or box_folder_id.") + + """Validate that we don't have both box_file_ids and box_folder_id.""" + if values.get("box_file_ids") and values.get("box_folder_id"): + raise ValueError( + "You must provide either box_file_ids or box_folder_id, not both." + ) + + """Validate that we have either a box_developer_token or box_auth.""" + if not values.get("box_auth") and not values.get("box_developer_token"): + raise ValueError( + "you must provide box_developer_token or a box_auth " + "generated with langchain_box.utilities.BoxAuth" + ) + + box = BoxAPIWrapper( # type: ignore[call-arg] + box_developer_token=values.get("box_developer_token"), + box_auth=values.get("box_auth"), + character_limit=values.get("character_limit"), + ) + + values["box"] = box + + return values + + def _get_files_from_folder(self, folder_id): # type: ignore[no-untyped-def] + folder_content = self.box.get_folder_items(folder_id) + + for file in folder_content: + try: + if file.type == FileBaseTypeField.FILE: + doc = self.box.get_document_by_file_id(file.id) + + if doc is not None: + yield doc + + elif file.type == "folder" and self.recursive: + try: + yield from self._get_files_from_folder(file.id) + except TypeError: + pass + except TypeError: + pass + + def lazy_load(self) -> Iterator[Document]: + """Load documents. Accepts no arguments. 
Returns `Iterator[Document]`""" + if self.box_file_ids: + for file_id in self.box_file_ids: + try: + file = self.box.get_document_by_file_id(file_id) # type: ignore[union-attr] + + if file is not None: + yield file + except TypeError: + pass + elif self.box_folder_id: + try: + yield from self._get_files_from_folder(self.box_folder_id) + except TypeError: + pass + except Exception as e: + print(f"Exception {e}") # noqa: T201 + else: + raise ValueError( + "You must provide either `box_file_ids` or `box_folder_id`" + ) diff --git a/libs/partners/box/langchain_box/py.typed b/libs/partners/box/langchain_box/py.typed new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/libs/partners/box/langchain_box/utilities/__init__.py b/libs/partners/box/langchain_box/utilities/__init__.py new file mode 100644 index 0000000000000..91bb3148d2b69 --- /dev/null +++ b/libs/partners/box/langchain_box/utilities/__init__.py @@ -0,0 +1,5 @@ +"""Box API Utilities.""" + +from langchain_box.utilities.box import BoxAPIWrapper, BoxAuth, BoxAuthType + +__all__ = ["BoxAuth", "BoxAuthType", "BoxAPIWrapper"] diff --git a/libs/partners/box/langchain_box/utilities/box.py b/libs/partners/box/langchain_box/utilities/box.py new file mode 100644 index 0000000000000..06f4080d15c88 --- /dev/null +++ b/libs/partners/box/langchain_box/utilities/box.py @@ -0,0 +1,525 @@ +"""Util that calls Box APIs.""" + +from enum import Enum +from typing import Any, Dict, Optional + +import box_sdk_gen # type: ignore +import requests +from langchain_core.documents import Document +from langchain_core.pydantic_v1 import BaseModel, root_validator +from langchain_core.utils import get_from_dict_or_env + + +class DocumentFiles(Enum): + DOC = "doc" + DOCX = "docx" + GDOC = "gdoc" + GSHEET = "gsheet" + NUMBERS = "numbers" + ODS = "ods" + ODT = "odt" + PAGES = "pages" + PDF = "pdf" + RTF = "rtf" + WPD = "wpd" + XLS = "xls" + XLSM = "xlsm" + XLSX = "xlsx" + AS = "as" + AS3 = "as3" + ASM = "asm" + BAT = "bat" + C = "c" + CC = "cc" + CMAKE = "cmake" + CPP = "cpp" + CS = "cs" + CSS = "css" + CSV = "csv" + CXX = "cxx" + DIFF = "diff" + ERB = "erb" + GROOVY = "groovy" + H = "h" + HAML = "haml" + HH = "hh" + HTM = "htm" + HTML = "html" + JAVA = "java" + JS = "js" + JSON = "json" + LESS = "less" + LOG = "log" + M = "m" + MAKE = "make" + MD = "md" + ML = "ml" + MM = "mm" + MSG = "msg" + PHP = "php" + PL = "pl" + PROPERTIES = "properties" + PY = "py" + RB = "rb" + RST = "rst" + SASS = "sass" + SCALA = "scala" + SCM = "scm" + SCRIPT = "script" + SH = "sh" + SML = "sml" + SQL = "sql" + TXT = "txt" + VI = "vi" + VIM = "vim" + WEBDOC = "webdoc" + XHTML = "xhtml" + XLSB = "xlsb" + XML = "xml" + XSD = "xsd" + XSL = "xsl" + YAML = "yaml" + GSLLIDE = "gslide" + GSLIDES = "gslides" + KEY = "key" + ODP = "odp" + PPT = "ppt" + PPTX = "pptx" + BOXNOTE = "boxnote" + + +class ImageFiles(Enum): + ARW = "arw" + BMP = "bmp" + CR2 = "cr2" + DCM = "dcm" + DICM = "dicm" + DICOM = "dicom" + DNG = "dng" + EPS = "eps" + EXR = "exr" + GIF = "gif" + HEIC = "heic" + INDD = "indd" + INDML = "indml" + INDT = "indt" + INX = "inx" + JPEG = "jpeg" + JPG = "jpg" + NEF = "nef" + PNG = "png" + SVG = "svg" + TIF = "tif" + TIFF = "tiff" + TGA = "tga" + SVS = "svs" + + +""" + BoxAuthType + an enum to tell BoxLoader how you wish to autheticate your Box connection. + + Options are: + TOKEN - Use a developer token generated from the Box Deevloper Token. + Only recommended for development. + Provide `box_developer_token`. + CCG - Client Credentials Grant. 
+ provide `box_client_id`, `box_client_secret`, + and `box_enterprise_id` or optionally `box_user_id`. + JWT - Use JWT for authentication. Config should be stored on the file + system accessible to your app. + provide `box_jwt_path`. Optionally, provide `box_user_id` to + act as a specific user +""" + + +class BoxAuthType(Enum): + """Use a developer token or a token retrieved from box-sdk-gen""" + + TOKEN = "token" + """Use `client_credentials` type grant""" + CCG = "ccg" + """Use JWT bearer token auth""" + JWT = "jwt" + + +""" + `BoxAuth` supports the following authentication methods: + + * Token — either a developer token or any token generated through the Box SDK + * JWT with a service account + * JWT with a specified user + * CCG with a service account + * CCG with a specified user + + :::note + If using JWT authentication, you will need to download the configuration from the + Box developer console after generating your public/private key pair. Place this + file in your application directory structure somewhere. You will use the path to + this file when using the `BoxAuth` helper class. + ::: + + For more information, learn about how to + [set up a Box application](https://developer.box.com/guides/getting-started/first-application/), + and check out the + [Box authentication guide](https://developer.box.com/guides/authentication/select/) + for more about our different authentication options. + + Simple implementation + + To instantiate, you must provide a ``langchain_box.utilities.BoxAuthType``. + + BoxAuthType is an enum to tell BoxLoader how you wish to autheticate your + Box connection. + + Options are: + TOKEN - Use a developer token generated from the Box Deevloper Token. + Only recommended for development. + Provide `box_developer_token`. + CCG - Client Credentials Grant. + provide `box_client_id`, `box_client_secret`, + and `box_enterprise_id` or optionally `box_user_id`. + JWT - Use JWT for authentication. Config should be stored on the file + system accessible to your app. + provide `box_jwt_path`. Optionally, provide `box_user_id` to + act as a specific user + + .. code-block:: python + from langchain_box.document_loaders import BoxLoader + from langchain_box.utilities import BoxAuth, BoxAuthType + + auth = BoxAuth( + auth_type=BoxAuthType.TOKEN, + box_developer_token=box_developer_token + ) + + loader = BoxLoader( + box_auth=auth, + ... + ) + + To see examples for each supported authentication methodology, visit the + [Box providers](/docs/integrations/providers/box) page. If you want to + use OAuth 2.0 `authorization_code` flow, use + [box-sdk-gen](https://github.com/box/box-python-sdk-gen) SDK, get your + token, and use `BoxAuthType.TOKEN` type. +""" + + +class BoxAuth(BaseModel): + """Authentication type to use. Must pass BoxAuthType enum""" + + auth_type: BoxAuthType + """ If using BoxAuthType.TOKEN, provide your token here""" + box_developer_token: Optional[str] = None + """If using BoxAuthType.JWT, provide local path to your + JWT configuration file""" + box_jwt_path: Optional[str] = None + """If using BoxAuthType.CCG, provide your app's client ID""" + box_client_id: Optional[str] = None + """If using BoxAuthType.CCG, provide your app's client secret""" + box_client_secret: Optional[str] = None + """If using BoxAuthType.CCG, provide your enterprise ID. 
+ Only required if you are not sending `box_user_id`""" + box_enterprise_id: Optional[str] = None + """If using BoxAuthType.CCG or BoxAuthType.JWT, providing + `box_user_id` will act on behalf of a specific user""" + box_user_id: Optional[str] = None + + box_client: Optional[box_sdk_gen.BoxClient] = None + custom_header: Dict = dict({"x-box-ai-library": "langchain"}) + + class Config: + arbitrary_types_allowed = True + use_enum_values = True + extra = "allow" + + @root_validator() + def validate_box_auth_inputs(cls, values: Dict[str, Any]) -> Dict[str, Any]: + """Validate auth_type is set""" + if not values.get("auth_type"): + raise ValueError("Auth type must be set.") + + """Validate that TOKEN auth type provides box_developer_token.""" + if values.get("auth_type") == "token": + if not get_from_dict_or_env( + values, "box_developer_token", "BOX_DEVELOPER_TOKEN" + ): + raise ValueError( + f"{values.get('auth_type')} requires box_developer_token to be set" + ) + + """Validate that JWT auth type provides box_jwt_path.""" + if values.get("auth_type") == "jwt": + if not get_from_dict_or_env(values, "box_jwt_path", "BOX_JWT_PATH"): + raise ValueError( + f"{values.get('auth_type')} requires box_jwt_path to be set" + ) + + """Validate that CCG auth type provides box_client_id and + box_client_secret and either box_enterprise_id or box_user_id.""" + if values.get("auth_type") == "ccg": + if ( + not get_from_dict_or_env(values, "box_client_id", "BOX_CLIENT_ID") + or not get_from_dict_or_env( + values, "box_client_secret", "BOX_CLIENT_SECRET" + ) + or ( + not values.get("box_enterprise_id") + and not values.get("box_user_id") + ) + ): + raise ValueError( + f"{values.get('auth_type')} requires box_client_id, \ + box_client_secret, and box_enterprise_id." + ) + + return values + + def authorize(self) -> None: + match self.auth_type: + case "token": + try: + auth = box_sdk_gen.BoxDeveloperTokenAuth( + token=self.box_developer_token + ) + self.box_client = box_sdk_gen.BoxClient( + auth=auth + ).with_extra_headers(extra_headers=self.custom_header) + + except box_sdk_gen.BoxSDKError as bse: + raise RuntimeError( + f"Error getting client from developer token: {bse.message}" + ) + except Exception as ex: + raise ValueError( + f"Invalid Box developer token. Please verify your \ + token and try again.\n{ex}" + ) from ex + + case "jwt": + try: + jwt_config = box_sdk_gen.JWTConfig.from_config_file( + config_file_path=self.box_jwt_path + ) + auth = box_sdk_gen.BoxJWTAuth(config=jwt_config) + + self.box_client = box_sdk_gen.BoxClient( + auth=auth + ).with_extra_headers(extra_headers=self.custom_header) + + if self.box_user_id is not None: + user_auth = auth.with_user_subject(self.box_user_id) + self.box_client = box_sdk_gen.BoxClient( + auth=user_auth + ).with_extra_headers(extra_headers=self.custom_header) + + except box_sdk_gen.BoxSDKError as bse: + raise RuntimeError( + f"Error getting client from jwt token: {bse.message}" + ) + except Exception as ex: + raise ValueError( + "Error authenticating. Please verify your JWT config \ + and try again." 
+ ) from ex + + case "ccg": + try: + if self.box_user_id is not None: + ccg_config = box_sdk_gen.CCGConfig( + client_id=self.box_client_id, + client_secret=self.box_client_secret, + user_id=self.box_user_id, + ) + else: + ccg_config = box_sdk_gen.CCGConfig( + client_id=self.box_client_id, + client_secret=self.box_client_secret, + enterprise_id=self.box_enterprise_id, + ) + auth = box_sdk_gen.BoxCCGAuth(config=ccg_config) + + self.box_client = box_sdk_gen.BoxClient( + auth=auth + ).with_extra_headers(extra_headers=self.custom_header) + + except box_sdk_gen.BoxSDKError as bse: + raise RuntimeError( + f"Error getting client from ccg token: {bse.message}" + ) + except Exception as ex: + raise ValueError( + "Error authenticating. Please verify you are providing a \ + valid client id, secret and either a valid user ID or \ + enterprise ID." + ) from ex + + case _: + raise ValueError( + f"{self.auth_type} is not a valid auth_type. Value must be \ + TOKEN, CCG, or JWT." + ) + + def get_client(self) -> box_sdk_gen.BoxClient: + """Instantiate the Box SDK.""" + if self.box_client is None: + self.authorize() + + return self.box_client + + +class BoxAPIWrapper(BaseModel): + """Wrapper for Box API.""" + + """String containing the Box Developer Token generated in the developer console""" + box_developer_token: Optional[str] = None + """Configured langchain_box.utilities.BoxAuth object""" + box_auth: Optional[BoxAuth] = None + """character_limit is an int that caps the number of characters to + return per document.""" + character_limit: Optional[int] = -1 + + box: Optional[box_sdk_gen.BoxClient] + file_count: int = 0 + + class Config: + arbitrary_types_allowed = True + use_enum_values = True + extra = "allow" + + @root_validator(allow_reuse=True) + def validate_box_api_inputs(cls, values: Dict[str, Any]) -> Dict[str, Any]: + values["box"] = None + + """Validate that TOKEN auth type provides box_developer_token.""" + if not values.get("box_auth"): + if not get_from_dict_or_env( + values, "box_developer_token", "BOX_DEVELOPER_TOKEN" + ): + raise ValueError( + "You must configure either box_developer_token of box_auth" + ) + else: + box_auth = values.get("box_auth") + values["box"] = box_auth.get_client() # type: ignore[union-attr] + + return values + + def get_box_client(self) -> box_sdk_gen.BoxClient: + box_auth = BoxAuth( + auth_type=BoxAuthType.TOKEN, box_developer_token=self.box_developer_token + ) + + self.box = box_auth.get_client() + + def _do_request(self, url: str) -> Any: + try: + access_token = self.box.auth.retrieve_token().access_token # type: ignore[union-attr] + except box_sdk_gen.BoxSDKError as bse: + raise RuntimeError(f"Error getting client from jwt token: {bse.message}") + + resp = requests.get(url, headers={"Authorization": f"Bearer {access_token}"}) + resp.raise_for_status() + return resp.content + + def get_folder_items(self, folder_id: str) -> box_sdk_gen.Items: + """Get all the items in a folder. Accepts folder_id as str. 
+ returns box_sdk_gen.Items""" + if self.box is None: + self.get_box_client() + + try: + folder_contents = self.box.folders.get_folder_items( # type: ignore[union-attr] + folder_id, fields=["id", "type", "name"] + ) + except box_sdk_gen.BoxAPIError as bae: + raise RuntimeError( + f"BoxAPIError: Error getting folder content: {bae.message}" + ) + except box_sdk_gen.BoxSDKError as bse: + raise RuntimeError( + f"BoxSDKError: Error getting folder content: {bse.message}" + ) + + return folder_contents.entries + + def get_text_representation(self, file_id: str = "") -> tuple[str, str, str]: + try: + from box_sdk_gen import BoxAPIError, BoxSDKError + except ImportError: + raise ImportError("You must run `pip install box-sdk-gen`") + + if self.box is None: + self.get_box_client() + + try: + file = self.box.files.get_file_by_id( # type: ignore[union-attr] + file_id, + x_rep_hints="[extracted_text]", + fields=["name", "representations", "type"], + ) + except BoxAPIError as bae: + raise RuntimeError(f"BoxAPIError: Error getting text rep: {bae.message}") + except BoxSDKError as bse: + raise RuntimeError(f"BoxSDKError: Error getting text rep: {bse.message}") + except Exception: + return None, None, None # type: ignore[return-value] + + file_repr = file.representations.entries + + if len(file_repr) <= 0: + return None, None, None # type: ignore[return-value] + + for entry in file_repr: + if entry.representation == "extracted_text": + # If the file representation doesn't exist, calling + # info.url will generate text if possible + if entry.status.state == "none": + self._do_request(entry.info.url) + + url = entry.content.url_template.replace("{+asset_path}", "") + file_name = file.name.replace(".", "_").replace(" ", "_") + + try: + raw_content = self._do_request(url) + except requests.exceptions.HTTPError: + return None, None, None # type: ignore[return-value] + + if self.character_limit > 0: # type: ignore[operator] + content = raw_content[0 : self.character_limit] + else: + content = raw_content + + return file_name, content, url + + return None, None, None # type: ignore[return-value] + + def get_document_by_file_id(self, file_id: str) -> Optional[Document]: + """Load a file from a Box id. Accepts file_id as str. + Returns `Document`""" + + if self.box is None: + self.get_box_client() + + file = self.box.files.get_file_by_id( # type: ignore[union-attr] + file_id, fields=["name", "type", "extension"] + ) + + if file.type == "file": + if hasattr(DocumentFiles, file.extension.upper()): + file_name, content, url = self.get_text_representation(file_id=file_id) + + if file_name is None or content is None or url is None: + return None + + metadata = { + "source": f"{url}", + "title": f"{file_name}", + } + + return Document(page_content=content, metadata=metadata) + + return None + + return None diff --git a/libs/partners/box/poetry.lock b/libs/partners/box/poetry.lock new file mode 100644 index 0000000000000..b765ec94db31f --- /dev/null +++ b/libs/partners/box/poetry.lock @@ -0,0 +1,984 @@ +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. 
+ +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[[package]] +name = "box-sdk-gen" +version = "1.3.0" +description = "[Box Platform](https://box.dev) provides functionality to provide access to content stored within [Box](https://box.com). It provides endpoints for basic manipulation of files and folders, management of users within an enterprise, as well as more complex topics such as legal holds and retention policies." +optional = false +python-versions = "*" +files = [ + {file = "box_sdk_gen-1.3.0-py3-none-any.whl", hash = "sha256:9b3d5a8196869323031eff49d46c85b9b7734353b8fba52614296369f4d24b7d"}, + {file = "box_sdk_gen-1.3.0.tar.gz", hash = "sha256:e0a183aecf5a10989023b12e253c758204b2b1bb902224421d06a7015ce8a1ac"}, +] + +[package.dependencies] +cryptography = {version = ">=3", optional = true, markers = "extra == \"jwt\""} +pyjwt = {version = ">=1.7.0", optional = true, markers = "extra == \"jwt\""} +requests = "*" +requests-toolbelt = "*" + +[package.extras] +dev = ["tox"] +jwt = ["cryptography (>=3)", "pyjwt (>=1.7.0)"] +test = ["cryptography (>=3)", "pyjwt (>=1.7.0)", "pytest", "pytest-cov", "pytest-timeout"] + +[[package]] +name = "certifi" +version = "2024.7.4" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, + {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, +] + +[[package]] +name = "cffi" +version = "1.17.0" +description = "Foreign Function Interface for Python calling C code." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "cffi-1.17.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f9338cc05451f1942d0d8203ec2c346c830f8e86469903d5126c1f0a13a2bcbb"}, + {file = "cffi-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0ce71725cacc9ebf839630772b07eeec220cbb5f03be1399e0457a1464f8e1a"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c815270206f983309915a6844fe994b2fa47e5d05c4c4cef267c3b30e34dbe42"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6bdcd415ba87846fd317bee0774e412e8792832e7805938987e4ede1d13046d"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a98748ed1a1df4ee1d6f927e151ed6c1a09d5ec21684de879c7ea6aa96f58f2"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a048d4f6630113e54bb4b77e315e1ba32a5a31512c31a273807d0027a7e69ab"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24aa705a5f5bd3a8bcfa4d123f03413de5d86e497435693b638cbffb7d5d8a1b"}, + {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:856bf0924d24e7f93b8aee12a3a1095c34085600aa805693fb7f5d1962393206"}, + {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:4304d4416ff032ed50ad6bb87416d802e67139e31c0bde4628f36a47a3164bfa"}, + {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:331ad15c39c9fe9186ceaf87203a9ecf5ae0ba2538c9e898e3a6967e8ad3db6f"}, + {file = "cffi-1.17.0-cp310-cp310-win32.whl", hash = "sha256:669b29a9eca6146465cc574659058ed949748f0809a2582d1f1a324eb91054dc"}, + {file = "cffi-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:48b389b1fd5144603d61d752afd7167dfd205973a43151ae5045b35793232aa2"}, + {file = "cffi-1.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c5d97162c196ce54af6700949ddf9409e9833ef1003b4741c2b39ef46f1d9720"}, + {file = "cffi-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ba5c243f4004c750836f81606a9fcb7841f8874ad8f3bf204ff5e56332b72b9"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb9333f58fc3a2296fb1d54576138d4cf5d496a2cc118422bd77835e6ae0b9cb"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:435a22d00ec7d7ea533db494da8581b05977f9c37338c80bc86314bec2619424"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1df34588123fcc88c872f5acb6f74ae59e9d182a2707097f9e28275ec26a12d"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df8bb0010fdd0a743b7542589223a2816bdde4d94bb5ad67884348fa2c1c67e8"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8b5b9712783415695663bd463990e2f00c6750562e6ad1d28e072a611c5f2a6"}, + {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ffef8fd58a36fb5f1196919638f73dd3ae0db1a878982b27a9a5a176ede4ba91"}, + {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e67d26532bfd8b7f7c05d5a766d6f437b362c1bf203a3a5ce3593a645e870b8"}, + {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:45f7cd36186db767d803b1473b3c659d57a23b5fa491ad83c6d40f2af58e4dbb"}, + {file = "cffi-1.17.0-cp311-cp311-win32.whl", hash 
= "sha256:a9015f5b8af1bb6837a3fcb0cdf3b874fe3385ff6274e8b7925d81ccaec3c5c9"}, + {file = "cffi-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:b50aaac7d05c2c26dfd50c3321199f019ba76bb650e346a6ef3616306eed67b0"}, + {file = "cffi-1.17.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aec510255ce690d240f7cb23d7114f6b351c733a74c279a84def763660a2c3bc"}, + {file = "cffi-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2770bb0d5e3cc0e31e7318db06efcbcdb7b31bcb1a70086d3177692a02256f59"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db9a30ec064129d605d0f1aedc93e00894b9334ec74ba9c6bdd08147434b33eb"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a47eef975d2b8b721775a0fa286f50eab535b9d56c70a6e62842134cf7841195"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f3e0992f23bbb0be00a921eae5363329253c3b86287db27092461c887b791e5e"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6107e445faf057c118d5050560695e46d272e5301feffda3c41849641222a828"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb862356ee9391dc5a0b3cbc00f416b48c1b9a52d252d898e5b7696a5f9fe150"}, + {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c1c13185b90bbd3f8b5963cd8ce7ad4ff441924c31e23c975cb150e27c2bf67a"}, + {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17c6d6d3260c7f2d94f657e6872591fe8733872a86ed1345bda872cfc8c74885"}, + {file = "cffi-1.17.0-cp312-cp312-win32.whl", hash = "sha256:c3b8bd3133cd50f6b637bb4322822c94c5ce4bf0d724ed5ae70afce62187c492"}, + {file = "cffi-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:dca802c8db0720ce1c49cce1149ff7b06e91ba15fa84b1d59144fef1a1bc7ac2"}, + {file = "cffi-1.17.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6ce01337d23884b21c03869d2f68c5523d43174d4fc405490eb0091057943118"}, + {file = "cffi-1.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cab2eba3830bf4f6d91e2d6718e0e1c14a2f5ad1af68a89d24ace0c6b17cced7"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:14b9cbc8f7ac98a739558eb86fabc283d4d564dafed50216e7f7ee62d0d25377"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b00e7bcd71caa0282cbe3c90966f738e2db91e64092a877c3ff7f19a1628fdcb"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:41f4915e09218744d8bae14759f983e466ab69b178de38066f7579892ff2a555"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4760a68cab57bfaa628938e9c2971137e05ce48e762a9cb53b76c9b569f1204"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:011aff3524d578a9412c8b3cfaa50f2c0bd78e03eb7af7aa5e0df59b158efb2f"}, + {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:a003ac9edc22d99ae1286b0875c460351f4e101f8c9d9d2576e78d7e048f64e0"}, + {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ef9528915df81b8f4c7612b19b8628214c65c9b7f74db2e34a646a0a2a0da2d4"}, + {file = "cffi-1.17.0-cp313-cp313-win32.whl", hash = "sha256:70d2aa9fb00cf52034feac4b913181a6e10356019b18ef89bc7c12a283bf5f5a"}, + {file = "cffi-1.17.0-cp313-cp313-win_amd64.whl", 
hash = "sha256:b7b6ea9e36d32582cda3465f54c4b454f62f23cb083ebc7a94e2ca6ef011c3a7"}, + {file = "cffi-1.17.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:964823b2fc77b55355999ade496c54dde161c621cb1f6eac61dc30ed1b63cd4c"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:516a405f174fd3b88829eabfe4bb296ac602d6a0f68e0d64d5ac9456194a5b7e"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dec6b307ce928e8e112a6bb9921a1cb00a0e14979bf28b98e084a4b8a742bd9b"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4094c7b464cf0a858e75cd14b03509e84789abf7b79f8537e6a72152109c76e"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2404f3de742f47cb62d023f0ba7c5a916c9c653d5b368cc966382ae4e57da401"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3aa9d43b02a0c681f0bfbc12d476d47b2b2b6a3f9287f11ee42989a268a1833c"}, + {file = "cffi-1.17.0-cp38-cp38-win32.whl", hash = "sha256:0bb15e7acf8ab35ca8b24b90af52c8b391690ef5c4aec3d31f38f0d37d2cc499"}, + {file = "cffi-1.17.0-cp38-cp38-win_amd64.whl", hash = "sha256:93a7350f6706b31f457c1457d3a3259ff9071a66f312ae64dc024f049055f72c"}, + {file = "cffi-1.17.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1a2ddbac59dc3716bc79f27906c010406155031a1c801410f1bafff17ea304d2"}, + {file = "cffi-1.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6327b572f5770293fc062a7ec04160e89741e8552bf1c358d1a23eba68166759"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbc183e7bef690c9abe5ea67b7b60fdbca81aa8da43468287dae7b5c046107d4"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bdc0f1f610d067c70aa3737ed06e2726fd9d6f7bfee4a351f4c40b6831f4e82"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6d872186c1617d143969defeadac5a904e6e374183e07977eedef9c07c8953bf"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d46ee4764b88b91f16661a8befc6bfb24806d885e27436fdc292ed7e6f6d058"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f76a90c345796c01d85e6332e81cab6d70de83b829cf1d9762d0a3da59c7932"}, + {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0e60821d312f99d3e1569202518dddf10ae547e799d75aef3bca3a2d9e8ee693"}, + {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:eb09b82377233b902d4c3fbeeb7ad731cdab579c6c6fda1f763cd779139e47c3"}, + {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:24658baf6224d8f280e827f0a50c46ad819ec8ba380a42448e24459daf809cf4"}, + {file = "cffi-1.17.0-cp39-cp39-win32.whl", hash = "sha256:0fdacad9e0d9fc23e519efd5ea24a70348305e8d7d85ecbb1a5fa66dc834e7fb"}, + {file = "cffi-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:7cbc78dc018596315d4e7841c8c3a7ae31cc4d638c9b627f87d52e8abaaf2d29"}, + {file = "cffi-1.17.0.tar.gz", hash = "sha256:f3157624b7558b914cb039fd1af735e5e8049a87c817cc215109ad1c8779df76"}, +] + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "codespell" +version = "2.3.0" +description = "Codespell" +optional = false +python-versions = ">=3.8" +files = [ + {file = "codespell-2.3.0-py3-none-any.whl", hash = "sha256:a9c7cef2501c9cfede2110fd6d4e5e62296920efe9abfb84648df866e47f58d1"}, + {file = "codespell-2.3.0.tar.gz", hash = "sha256:360c7d10f75e65f67bad720af7007e1060a5d395670ec11a7ed1fed9dd17471f"}, +] + +[package.extras] +dev = ["Pygments", "build", "chardet", "pre-commit", "pytest", "pytest-cov", "pytest-dependency", "ruff", "tomli", "twine"] +hard-encoding-detection = ["chardet"] +toml = ["tomli"] +types = ["chardet (>=5.1.0)", "mypy", "pytest", "pytest-cov", "pytest-dependency"] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "cryptography" +version = "43.0.0" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "cryptography-43.0.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:64c3f16e2a4fc51c0d06af28441881f98c5d91009b8caaff40cf3548089e9c74"}, + {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3dcdedae5c7710b9f97ac6bba7e1052b95c7083c9d0e9df96e02a1932e777895"}, + {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d9a1eca329405219b605fac09ecfc09ac09e595d6def650a437523fcd08dd22"}, + {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ea9e57f8ea880eeea38ab5abf9fbe39f923544d7884228ec67d666abd60f5a47"}, + {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9a8d6802e0825767476f62aafed40532bd435e8a5f7d23bd8b4f5fd04cc80ecf"}, + {file = "cryptography-43.0.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:cc70b4b581f28d0a254d006f26949245e3657d40d8857066c2ae22a61222ef55"}, + {file = "cryptography-43.0.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:4a997df8c1c2aae1e1e5ac49c2e4f610ad037fc5a3aadc7b64e39dea42249431"}, + {file = "cryptography-43.0.0-cp37-abi3-win32.whl", hash = "sha256:6e2b11c55d260d03a8cf29ac9b5e0608d35f08077d8c087be96287f43af3ccdc"}, + {file = "cryptography-43.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:31e44a986ceccec3d0498e16f3d27b2ee5fdf69ce2ab89b52eaad1d2f33d8778"}, + {file = "cryptography-43.0.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:7b3f5fe74a5ca32d4d0f302ffe6680fcc5c28f8ef0dc0ae8f40c0f3a1b4fca66"}, + {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac1955ce000cb29ab40def14fd1bbfa7af2017cca696ee696925615cafd0dce5"}, + {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:299d3da8e00b7e2b54bb02ef58d73cd5f55fb31f33ebbf33bd00d9aa6807df7e"}, + {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ee0c405832ade84d4de74b9029bedb7b31200600fa524d218fc29bfa371e97f5"}, + {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cb013933d4c127349b3948aa8aaf2f12c0353ad0eccd715ca789c8a0f671646f"}, + {file = "cryptography-43.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:fdcb265de28585de5b859ae13e3846a8e805268a823a12a4da2597f1f5afc9f0"}, + {file = "cryptography-43.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2905ccf93a8a2a416f3ec01b1a7911c3fe4073ef35640e7ee5296754e30b762b"}, + {file = "cryptography-43.0.0-cp39-abi3-win32.whl", hash = "sha256:47ca71115e545954e6c1d207dd13461ab81f4eccfcb1345eac874828b5e3eaaf"}, + {file = "cryptography-43.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:0663585d02f76929792470451a5ba64424acc3cd5227b03921dab0e2f27b1709"}, + {file = "cryptography-43.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2c6d112bf61c5ef44042c253e4859b3cbbb50df2f78fa8fae6747a7814484a70"}, + {file = "cryptography-43.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:844b6d608374e7d08f4f6e6f9f7b951f9256db41421917dfb2d003dde4cd6b66"}, + {file = "cryptography-43.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:51956cf8730665e2bdf8ddb8da0056f699c1a5715648c1b0144670c1ba00b48f"}, + {file = "cryptography-43.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:aae4d918f6b180a8ab8bf6511a419473d107df4dbb4225c7b48c5c9602c38c7f"}, + {file = "cryptography-43.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:232ce02943a579095a339ac4b390fbbe97f5b5d5d107f8a08260ea2768be8cc2"}, + {file = "cryptography-43.0.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5bcb8a5620008a8034d39bce21dc3e23735dfdb6a33a06974739bfa04f853947"}, + {file = "cryptography-43.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:08a24a7070b2b6804c1940ff0f910ff728932a9d0e80e7814234269f9d46d069"}, + {file = "cryptography-43.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e9c5266c432a1e23738d178e51c2c7a5e2ddf790f248be939448c0ba2021f9d1"}, + {file = "cryptography-43.0.0.tar.gz", hash = "sha256:b88075ada2d51aa9f18283532c9f60e72170041bba88d7f37e49cbb10275299e"}, +] + +[package.dependencies] +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] +nox = ["nox"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] +sdist = ["build"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["certifi", "cryptography-vectors (==43.0.0)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] + +[[package]] +name = "exceptiongroup" +version = "1.2.2" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "idna" +version = "3.7" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "jsonpatch" +version = "1.33" +description = "Apply JSON-Patches (RFC 6902)" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +files = [ + {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"}, + {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"}, +] + +[package.dependencies] +jsonpointer = ">=1.9" + +[[package]] +name = "jsonpointer" +version = "3.0.0" +description = "Identify specific nodes in a JSON document (RFC 6901)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942"}, + {file = "jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef"}, +] + +[[package]] +name = "langchain-core" 
+version = "0.2.28" +description = "Building applications with LLMs through composability" +optional = false +python-versions = ">=3.8.1,<4.0" +files = [] +develop = true + +[package.dependencies] +jsonpatch = "^1.33" +langsmith = "^0.1.75" +packaging = ">=23.2,<25" +pydantic = [ + {version = ">=1,<3", markers = "python_full_version < \"3.12.4\""}, + {version = ">=2.7.4,<3.0.0", markers = "python_full_version >= \"3.12.4\""}, +] +PyYAML = ">=5.3" +tenacity = "^8.1.0,!=8.4.0" +typing-extensions = ">=4.7" + +[package.source] +type = "directory" +url = "../../core" + +[[package]] +name = "langsmith" +version = "0.1.99" +description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "langsmith-0.1.99-py3-none-any.whl", hash = "sha256:ef8d1d74a2674c514aa429b0171a9fbb661207dc3835142cca0e8f1bf97b26b0"}, + {file = "langsmith-0.1.99.tar.gz", hash = "sha256:b5c6a1f158abda61600a4a445081ee848b4a28b758d91f2793dc02aeffafcaf1"}, +] + +[package.dependencies] +orjson = ">=3.9.14,<4.0.0" +pydantic = [ + {version = ">=1,<3", markers = "python_full_version < \"3.12.4\""}, + {version = ">=2.7.4,<3.0.0", markers = "python_full_version >= \"3.12.4\""}, +] +requests = ">=2,<3" + +[[package]] +name = "mypy" +version = "1.11.1" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-1.11.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a32fc80b63de4b5b3e65f4be82b4cfa362a46702672aa6a0f443b4689af7008c"}, + {file = "mypy-1.11.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c1952f5ea8a5a959b05ed5f16452fddadbaae48b5d39235ab4c3fc444d5fd411"}, + {file = "mypy-1.11.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1e30dc3bfa4e157e53c1d17a0dad20f89dc433393e7702b813c10e200843b03"}, + {file = "mypy-1.11.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2c63350af88f43a66d3dfeeeb8d77af34a4f07d760b9eb3a8697f0386c7590b4"}, + {file = "mypy-1.11.1-cp310-cp310-win_amd64.whl", hash = "sha256:a831671bad47186603872a3abc19634f3011d7f83b083762c942442d51c58d58"}, + {file = "mypy-1.11.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7b6343d338390bb946d449677726edf60102a1c96079b4f002dedff375953fc5"}, + {file = "mypy-1.11.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e4fe9f4e5e521b458d8feb52547f4bade7ef8c93238dfb5bbc790d9ff2d770ca"}, + {file = "mypy-1.11.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:886c9dbecc87b9516eff294541bf7f3655722bf22bb898ee06985cd7269898de"}, + {file = "mypy-1.11.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fca4a60e1dd9fd0193ae0067eaeeb962f2d79e0d9f0f66223a0682f26ffcc809"}, + {file = "mypy-1.11.1-cp311-cp311-win_amd64.whl", hash = "sha256:0bd53faf56de9643336aeea1c925012837432b5faf1701ccca7fde70166ccf72"}, + {file = "mypy-1.11.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f39918a50f74dc5969807dcfaecafa804fa7f90c9d60506835036cc1bc891dc8"}, + {file = "mypy-1.11.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0bc71d1fb27a428139dd78621953effe0d208aed9857cb08d002280b0422003a"}, + {file = "mypy-1.11.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b868d3bcff720dd7217c383474008ddabaf048fad8d78ed948bb4b624870a417"}, + {file = "mypy-1.11.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:a707ec1527ffcdd1c784d0924bf5cb15cd7f22683b919668a04d2b9c34549d2e"}, + {file = "mypy-1.11.1-cp312-cp312-win_amd64.whl", hash = "sha256:64f4a90e3ea07f590c5bcf9029035cf0efeae5ba8be511a8caada1a4893f5525"}, + {file = "mypy-1.11.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:749fd3213916f1751fff995fccf20c6195cae941dc968f3aaadf9bb4e430e5a2"}, + {file = "mypy-1.11.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b639dce63a0b19085213ec5fdd8cffd1d81988f47a2dec7100e93564f3e8fb3b"}, + {file = "mypy-1.11.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c956b49c5d865394d62941b109728c5c596a415e9c5b2be663dd26a1ff07bc0"}, + {file = "mypy-1.11.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45df906e8b6804ef4b666af29a87ad9f5921aad091c79cc38e12198e220beabd"}, + {file = "mypy-1.11.1-cp38-cp38-win_amd64.whl", hash = "sha256:d44be7551689d9d47b7abc27c71257adfdb53f03880841a5db15ddb22dc63edb"}, + {file = "mypy-1.11.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2684d3f693073ab89d76da8e3921883019ea8a3ec20fa5d8ecca6a2db4c54bbe"}, + {file = "mypy-1.11.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:79c07eb282cb457473add5052b63925e5cc97dfab9812ee65a7c7ab5e3cb551c"}, + {file = "mypy-1.11.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11965c2f571ded6239977b14deebd3f4c3abd9a92398712d6da3a772974fad69"}, + {file = "mypy-1.11.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a2b43895a0f8154df6519706d9bca8280cda52d3d9d1514b2d9c3e26792a0b74"}, + {file = "mypy-1.11.1-cp39-cp39-win_amd64.whl", hash = "sha256:1a81cf05975fd61aec5ae16501a091cfb9f605dc3e3c878c0da32f250b74760b"}, + {file = "mypy-1.11.1-py3-none-any.whl", hash = "sha256:0624bdb940255d2dd24e829d99a13cfeb72e4e9031f9492148f410ed30bcab54"}, + {file = "mypy-1.11.1.tar.gz", hash = "sha256:f404a0b069709f18bbdb702eb3dcfe51910602995de00bd39cea3050b5772d08"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=4.6.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." 
+optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "orjson" +version = "3.10.7" +description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" +optional = false +python-versions = ">=3.8" +files = [ + {file = "orjson-3.10.7-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:74f4544f5a6405b90da8ea724d15ac9c36da4d72a738c64685003337401f5c12"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34a566f22c28222b08875b18b0dfbf8a947e69df21a9ed5c51a6bf91cfb944ac"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bf6ba8ebc8ef5792e2337fb0419f8009729335bb400ece005606336b7fd7bab7"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac7cf6222b29fbda9e3a472b41e6a5538b48f2c8f99261eecd60aafbdb60690c"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de817e2f5fc75a9e7dd350c4b0f54617b280e26d1631811a43e7e968fa71e3e9"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:348bdd16b32556cf8d7257b17cf2bdb7ab7976af4af41ebe79f9796c218f7e91"}, + {file = "orjson-3.10.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:479fd0844ddc3ca77e0fd99644c7fe2de8e8be1efcd57705b5c92e5186e8a250"}, + {file = "orjson-3.10.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fdf5197a21dd660cf19dfd2a3ce79574588f8f5e2dbf21bda9ee2d2b46924d84"}, + {file = "orjson-3.10.7-cp310-none-win32.whl", hash = "sha256:d374d36726746c81a49f3ff8daa2898dccab6596864ebe43d50733275c629175"}, + {file = "orjson-3.10.7-cp310-none-win_amd64.whl", hash = "sha256:cb61938aec8b0ffb6eef484d480188a1777e67b05d58e41b435c74b9d84e0b9c"}, + {file = "orjson-3.10.7-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:7db8539039698ddfb9a524b4dd19508256107568cdad24f3682d5773e60504a2"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:480f455222cb7a1dea35c57a67578848537d2602b46c464472c995297117fa09"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8a9c9b168b3a19e37fe2778c0003359f07822c90fdff8f98d9d2a91b3144d8e0"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8de062de550f63185e4c1c54151bdddfc5625e37daf0aa1e75d2a1293e3b7d9a"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6b0dd04483499d1de9c8f6203f8975caf17a6000b9c0c54630cef02e44ee624e"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b58d3795dafa334fc8fd46f7c5dc013e6ad06fd5b9a4cc98cb1456e7d3558bd6"}, + {file = "orjson-3.10.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:33cfb96c24034a878d83d1a9415799a73dc77480e6c40417e5dda0710d559ee6"}, + {file = "orjson-3.10.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e724cebe1fadc2b23c6f7415bad5ee6239e00a69f30ee423f319c6af70e2a5c0"}, + {file = "orjson-3.10.7-cp311-none-win32.whl", hash = 
"sha256:82763b46053727a7168d29c772ed5c870fdae2f61aa8a25994c7984a19b1021f"}, + {file = "orjson-3.10.7-cp311-none-win_amd64.whl", hash = "sha256:eb8d384a24778abf29afb8e41d68fdd9a156cf6e5390c04cc07bbc24b89e98b5"}, + {file = "orjson-3.10.7-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:44a96f2d4c3af51bfac6bc4ef7b182aa33f2f054fd7f34cc0ee9a320d051d41f"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76ac14cd57df0572453543f8f2575e2d01ae9e790c21f57627803f5e79b0d3c3"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bdbb61dcc365dd9be94e8f7df91975edc9364d6a78c8f7adb69c1cdff318ec93"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b48b3db6bb6e0a08fa8c83b47bc169623f801e5cc4f24442ab2b6617da3b5313"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:23820a1563a1d386414fef15c249040042b8e5d07b40ab3fe3efbfbbcbcb8864"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0c6a008e91d10a2564edbb6ee5069a9e66df3fbe11c9a005cb411f441fd2c09"}, + {file = "orjson-3.10.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d352ee8ac1926d6193f602cbe36b1643bbd1bbcb25e3c1a657a4390f3000c9a5"}, + {file = "orjson-3.10.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d2d9f990623f15c0ae7ac608103c33dfe1486d2ed974ac3f40b693bad1a22a7b"}, + {file = "orjson-3.10.7-cp312-none-win32.whl", hash = "sha256:7c4c17f8157bd520cdb7195f75ddbd31671997cbe10aee559c2d613592e7d7eb"}, + {file = "orjson-3.10.7-cp312-none-win_amd64.whl", hash = "sha256:1d9c0e733e02ada3ed6098a10a8ee0052dd55774de3d9110d29868d24b17faa1"}, + {file = "orjson-3.10.7-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:77d325ed866876c0fa6492598ec01fe30e803272a6e8b10e992288b009cbe149"}, + {file = "orjson-3.10.7-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ea2c232deedcb605e853ae1db2cc94f7390ac776743b699b50b071b02bea6fe"}, + {file = "orjson-3.10.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3dcfbede6737fdbef3ce9c37af3fb6142e8e1ebc10336daa05872bfb1d87839c"}, + {file = "orjson-3.10.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:11748c135f281203f4ee695b7f80bb1358a82a63905f9f0b794769483ea854ad"}, + {file = "orjson-3.10.7-cp313-none-win32.whl", hash = "sha256:a7e19150d215c7a13f39eb787d84db274298d3f83d85463e61d277bbd7f401d2"}, + {file = "orjson-3.10.7-cp313-none-win_amd64.whl", hash = "sha256:eef44224729e9525d5261cc8d28d6b11cafc90e6bd0be2157bde69a52ec83024"}, + {file = "orjson-3.10.7-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:6ea2b2258eff652c82652d5e0f02bd5e0463a6a52abb78e49ac288827aaa1469"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:430ee4d85841e1483d487e7b81401785a5dfd69db5de01314538f31f8fbf7ee1"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4b6146e439af4c2472c56f8540d799a67a81226e11992008cb47e1267a9b3225"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:084e537806b458911137f76097e53ce7bf5806dda33ddf6aaa66a028f8d43a23"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:4829cf2195838e3f93b70fd3b4292156fc5e097aac3739859ac0dcc722b27ac0"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1193b2416cbad1a769f868b1749535d5da47626ac29445803dae7cc64b3f5c98"}, + {file = "orjson-3.10.7-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:4e6c3da13e5a57e4b3dca2de059f243ebec705857522f188f0180ae88badd354"}, + {file = "orjson-3.10.7-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c31008598424dfbe52ce8c5b47e0752dca918a4fdc4a2a32004efd9fab41d866"}, + {file = "orjson-3.10.7-cp38-none-win32.whl", hash = "sha256:7122a99831f9e7fe977dc45784d3b2edc821c172d545e6420c375e5a935f5a1c"}, + {file = "orjson-3.10.7-cp38-none-win_amd64.whl", hash = "sha256:a763bc0e58504cc803739e7df040685816145a6f3c8a589787084b54ebc9f16e"}, + {file = "orjson-3.10.7-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:e76be12658a6fa376fcd331b1ea4e58f5a06fd0220653450f0d415b8fd0fbe20"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed350d6978d28b92939bfeb1a0570c523f6170efc3f0a0ef1f1df287cd4f4960"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:144888c76f8520e39bfa121b31fd637e18d4cc2f115727865fdf9fa325b10412"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09b2d92fd95ad2402188cf51573acde57eb269eddabaa60f69ea0d733e789fe9"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5b24a579123fa884f3a3caadaed7b75eb5715ee2b17ab5c66ac97d29b18fe57f"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591bcfe7512353bd609875ab38050efe3d55e18934e2f18950c108334b4ff"}, + {file = "orjson-3.10.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f4db56635b58cd1a200b0a23744ff44206ee6aa428185e2b6c4a65b3197abdcd"}, + {file = "orjson-3.10.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0fa5886854673222618638c6df7718ea7fe2f3f2384c452c9ccedc70b4a510a5"}, + {file = "orjson-3.10.7-cp39-none-win32.whl", hash = "sha256:8272527d08450ab16eb405f47e0f4ef0e5ff5981c3d82afe0efd25dcbef2bcd2"}, + {file = "orjson-3.10.7-cp39-none-win_amd64.whl", hash = "sha256:974683d4618c0c7dbf4f69c95a979734bf183d0658611760017f6e70a145af58"}, + {file = "orjson-3.10.7.tar.gz", hash = "sha256:75ef0640403f945f3a1f9f6400686560dbfb0fb5b16589ad62cd477043c4eee3"}, +] + +[[package]] +name = "packaging" +version = "24.1" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, +] + +[[package]] +name = "pluggy" +version = "1.5.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pycparser" +version = "2.22" +description = "C parser in Python" +optional = false +python-versions = ">=3.8" +files = 
[ + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, +] + +[[package]] +name = "pydantic" +version = "2.8.2" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic-2.8.2-py3-none-any.whl", hash = "sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8"}, + {file = "pydantic-2.8.2.tar.gz", hash = "sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a"}, +] + +[package.dependencies] +annotated-types = ">=0.4.0" +pydantic-core = "2.20.1" +typing-extensions = {version = ">=4.6.1", markers = "python_version < \"3.13\""} + +[package.extras] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.20.1" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_core-2.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3"}, + {file = "pydantic_core-2.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840"}, + {file = "pydantic_core-2.20.1-cp310-none-win32.whl", hash = "sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250"}, + {file = "pydantic_core-2.20.1-cp310-none-win_amd64.whl", hash = "sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c"}, + {file = "pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312"}, + {file = "pydantic_core-2.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc"}, + {file = 
"pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b"}, + {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27"}, + {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b"}, + {file = "pydantic_core-2.20.1-cp311-none-win32.whl", hash = "sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a"}, + {file = "pydantic_core-2.20.1-cp311-none-win_amd64.whl", hash = "sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2"}, + {file = "pydantic_core-2.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231"}, + {file = "pydantic_core-2.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24"}, + {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1"}, + {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd"}, + {file = "pydantic_core-2.20.1-cp312-none-win32.whl", hash = "sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688"}, + {file = "pydantic_core-2.20.1-cp312-none-win_amd64.whl", hash = "sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d"}, + {file = "pydantic_core-2.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686"}, + {file 
= "pydantic_core-2.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83"}, + {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203"}, + {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0"}, + {file = "pydantic_core-2.20.1-cp313-none-win32.whl", hash = "sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e"}, + {file = "pydantic_core-2.20.1-cp313-none-win_amd64.whl", hash = "sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20"}, + {file = "pydantic_core-2.20.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4745f4ac52cc6686390c40eaa01d48b18997cb130833154801a442323cc78f91"}, + {file = "pydantic_core-2.20.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a8ad4c766d3f33ba8fd692f9aa297c9058970530a32c728a2c4bfd2616d3358b"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41e81317dd6a0127cabce83c0c9c3fbecceae981c8391e6f1dec88a77c8a569a"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04024d270cf63f586ad41fff13fde4311c4fc13ea74676962c876d9577bcc78f"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eaad4ff2de1c3823fddf82f41121bdf453d922e9a238642b1dedb33c4e4f98ad"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26ab812fa0c845df815e506be30337e2df27e88399b985d0bb4e3ecfe72df31c"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c5ebac750d9d5f2706654c638c041635c385596caf68f81342011ddfa1e5598"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2aafc5a503855ea5885559eae883978c9b6d8c8993d67766ee73d82e841300dd"}, + {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4868f6bd7c9d98904b748a2653031fc9c2f85b6237009d475b1008bfaeb0a5aa"}, + {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa2f457b4af386254372dfa78a2eda2563680d982422641a85f271c859df1987"}, + {file = "pydantic_core-2.20.1-cp38-none-win32.whl", hash = 
"sha256:225b67a1f6d602de0ce7f6c1c3ae89a4aa25d3de9be857999e9124f15dab486a"}, + {file = "pydantic_core-2.20.1-cp38-none-win_amd64.whl", hash = "sha256:6b507132dcfc0dea440cce23ee2182c0ce7aba7054576efc65634f080dbe9434"}, + {file = "pydantic_core-2.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c"}, + {file = "pydantic_core-2.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1"}, + {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09"}, + {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab"}, + {file = "pydantic_core-2.20.1-cp39-none-win32.whl", hash = "sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2"}, + {file = "pydantic_core-2.20.1-cp39-none-win_amd64.whl", hash = "sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"}, + {file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pyjwt" +version = "2.9.0" +description = "JSON Web Token implementation in Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "PyJWT-2.9.0-py3-none-any.whl", hash = "sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850"}, + {file = "pyjwt-2.9.0.tar.gz", hash = "sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c"}, +] + +[package.extras] +crypto = ["cryptography (>=3.4.0)"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] +tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] + +[[package]] +name = "pytest" +version = "7.4.4" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-asyncio" +version = "0.23.8" +description = "Pytest support for asyncio" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest_asyncio-0.23.8-py3-none-any.whl", hash = "sha256:50265d892689a5faefb84df80819d1ecef566eb3549cf915dfb33569359d1ce2"}, + {file = "pytest_asyncio-0.23.8.tar.gz", hash = "sha256:759b10b33a6dc61cce40a8bd5205e302978bbbcc00e279a8b61d9a6a3c82e4d3"}, +] + +[package.dependencies] +pytest = ">=7.0.0,<9" + +[package.extras] +docs 
= ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] +testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] + +[[package]] +name = "pytest-mock" +version = "3.14.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, +] + +[package.dependencies] +pytest = ">=6.2.5" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "pytest-socket" +version = "0.7.0" +description = "Pytest Plugin to disable socket calls during tests" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pytest_socket-0.7.0-py3-none-any.whl", hash = "sha256:7e0f4642177d55d317bbd58fc68c6bd9048d6eadb2d46a89307fa9221336ce45"}, + {file = "pytest_socket-0.7.0.tar.gz", hash = "sha256:71ab048cbbcb085c15a4423b73b619a8b35d6a307f46f78ea46be51b1b7e11b3"}, +] + +[package.dependencies] +pytest = ">=6.2.5" + +[[package]] +name = "python-dotenv" +version = "1.0.1" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, + {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "pyyaml" +version = "6.0.2" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = 
"PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, +] + +[[package]] +name = "requests" +version = "2.32.3" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-toolbelt" +version = "1.0.0" +description = "A utility belt for advanced users of python-requests" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"}, + {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"}, +] + +[package.dependencies] +requests = ">=2.0.1,<3.0.0" + +[[package]] +name = "ruff" +version = "0.1.15" +description = "An extremely fast Python linter and code formatter, written in Rust." +optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:5fe8d54df166ecc24106db7dd6a68d44852d14eb0729ea4672bb4d96c320b7df"}, + {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6f0bfbb53c4b4de117ac4d6ddfd33aa5fc31beeaa21d23c45c6dd249faf9126f"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0d432aec35bfc0d800d4f70eba26e23a352386be3a6cf157083d18f6f5881c8"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9405fa9ac0e97f35aaddf185a1be194a589424b8713e3b97b762336ec79ff807"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c66ec24fe36841636e814b8f90f572a8c0cb0e54d8b5c2d0e300d28a0d7bffec"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:6f8ad828f01e8dd32cc58bc28375150171d198491fc901f6f98d2a39ba8e3ff5"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86811954eec63e9ea162af0ffa9f8d09088bab51b7438e8b6488b9401863c25e"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd4025ac5e87d9b80e1f300207eb2fd099ff8200fa2320d7dc066a3f4622dc6b"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b17b93c02cdb6aeb696effecea1095ac93f3884a49a554a9afa76bb125c114c1"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ddb87643be40f034e97e97f5bc2ef7ce39de20e34608f3f829db727a93fb82c5"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:abf4822129ed3a5ce54383d5f0e964e7fef74a41e48eb1dfad404151efc130a2"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6c629cf64bacfd136c07c78ac10a54578ec9d1bd2a9d395efbee0935868bf852"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1bab866aafb53da39c2cadfb8e1c4550ac5340bb40300083eb8967ba25481447"}, + {file = "ruff-0.1.15-py3-none-win32.whl", hash = "sha256:2417e1cb6e2068389b07e6fa74c306b2810fe3ee3476d5b8a96616633f40d14f"}, + {file = "ruff-0.1.15-py3-none-win_amd64.whl", hash = 
"sha256:3837ac73d869efc4182d9036b1405ef4c73d9b1f88da2413875e34e0d6919587"}, + {file = "ruff-0.1.15-py3-none-win_arm64.whl", hash = "sha256:9a933dfb1c14ec7a33cceb1e49ec4a16b51ce3c20fd42663198746efc0427360"}, + {file = "ruff-0.1.15.tar.gz", hash = "sha256:f6dfa8c1b21c913c326919056c390966648b680966febcb796cc9d1aaab8564e"}, +] + +[[package]] +name = "tenacity" +version = "8.5.0" +description = "Retry code until it succeeds" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tenacity-8.5.0-py3-none-any.whl", hash = "sha256:b594c2a5945830c267ce6b79a166228323ed52718f30302c1359836112346687"}, + {file = "tenacity-8.5.0.tar.gz", hash = "sha256:8bc6c0c8a09b31e6cad13c47afbed1a567518250a9a171418582ed8d9c20ca78"}, +] + +[package.extras] +doc = ["reno", "sphinx"] +test = ["pytest", "tornado (>=4.5)", "typeguard"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "types-requests" +version = "2.32.0.20240712" +description = "Typing stubs for requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-requests-2.32.0.20240712.tar.gz", hash = "sha256:90c079ff05e549f6bf50e02e910210b98b8ff1ebdd18e19c873cd237737c1358"}, + {file = "types_requests-2.32.0.20240712-py3-none-any.whl", hash = "sha256:f754283e152c752e46e70942fa2a146b5bc70393522257bb85bd1ef7e019dcc3"}, +] + +[package.dependencies] +urllib3 = ">=2" + +[[package]] +name = "typing-extensions" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[[package]] +name = "urllib3" +version = "2.2.2" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, + {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[metadata] +lock-version = "2.0" +python-versions = ">=3.9.0,<3.13" +content-hash = "2309e22ed71789020df9f24af1408d12054d125766a9c2295672b880f548c506" diff --git a/libs/partners/box/pyproject.toml b/libs/partners/box/pyproject.toml new file mode 100644 index 0000000000000..180e95d152a84 --- /dev/null +++ b/libs/partners/box/pyproject.toml @@ -0,0 +1,92 @@ +[tool.poetry] +name = "langchain-box" +version = "0.1.0" +description = "An integration package connecting Box and LangChain" +authors = [] +readme = "README.md" +repository = "https://github.com/langchain-ai/langchain" +license = "MIT" + +[tool.poetry.urls] +"Source Code" = "https://github.com/langchain-ai/langchain/tree/master/libs/partners/box" +"Release Notes" = "https://github.com/langchain-ai/langchain/releases?q=tag%3A%22langchain-box%3D%3D0%22&expanded=true" + +[tool.poetry.dependencies] +python = ">=3.9.0,<3.13" +langchain-core = "^0.2.0" +box-sdk-gen = {extras = ["jwt"], version = "^1.1.0"} + +[tool.poetry.group.test] +optional = true + +[tool.poetry.group.test.dependencies] +pytest = "^7.4.3" +pytest_mock = "^3.14.0" +pytest-asyncio = "^0.23.2" +pytest-socket = "^0.7.0" +langchain-core = { path = "../../core", develop = true } + +[tool.poetry.group.codespell] +optional = true + +[tool.poetry.group.codespell.dependencies] +codespell = "^2.2.6" + +[tool.poetry.group.test_integration] +optional = true + +[tool.poetry.group.test_integration.dependencies] +python-dotenv = "^1.0.1" + +[tool.poetry.group.lint] +optional = true + +[tool.poetry.group.lint.dependencies] +ruff = "^0.1.8" + +[tool.poetry.group.typing.dependencies] +mypy = "^1.7.1" +langchain-core = { path = "../../core", develop = true } +types-requests = "^2.32.0.20240712" + +[tool.poetry.group.dev] +optional = true + +[tool.poetry.group.dev.dependencies] +langchain-core = { path = "../../core", develop = true } + +[tool.ruff.lint] +select = [ + "E", # pycodestyle + "F", # pyflakes + "I", # isort + "T201", # print +] + +[tool.mypy] +disallow_untyped_defs = "True" + +[tool.coverage.run] +omit = ["tests/*"] + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.pytest.ini_options] +# --strict-markers will raise errors on unknown marks. +# https://docs.pytest.org/en/7.1.x/how-to/mark.html#raising-errors-on-unknown-marks +# +# https://docs.pytest.org/en/7.1.x/reference/reference.html +# --strict-config any warnings encountered while parsing the `pytest` +# section of the configuration file raise errors. +# +# https://github.com/tophat/syrupy +# --snapshot-warn-unused Prints a warning on unused snapshots rather than fail the test suite. +# addopts = "--snapshot-warn-unused --strict-markers --strict-config --durations=5" +# Registering custom markers. 
+# https://docs.pytest.org/en/7.1.x/example/markers.html#registering-markers
+markers = [
+    "compile: mark placeholder test used to compile integration tests without running them",
+]
+asyncio_mode = "auto"
diff --git a/libs/partners/box/scripts/check_imports.py b/libs/partners/box/scripts/check_imports.py
new file mode 100644
index 0000000000000..365f5fa118da4
--- /dev/null
+++ b/libs/partners/box/scripts/check_imports.py
@@ -0,0 +1,17 @@
+import sys
+import traceback
+from importlib.machinery import SourceFileLoader
+
+if __name__ == "__main__":
+    files = sys.argv[1:]
+    has_failure = False
+    for file in files:
+        try:
+            SourceFileLoader("x", file).load_module()
+        except Exception:
+            has_failure = True
+            print(file)  # noqa: T201
+            traceback.print_exc()
+            print()  # noqa: T201
+
+    sys.exit(1 if has_failure else 0)
diff --git a/libs/partners/box/scripts/check_pydantic.sh b/libs/partners/box/scripts/check_pydantic.sh
new file mode 100755
index 0000000000000..06b5bb81ae236
--- /dev/null
+++ b/libs/partners/box/scripts/check_pydantic.sh
@@ -0,0 +1,27 @@
+#!/bin/bash
+#
+# This script searches for lines starting with "import pydantic" or "from pydantic"
+# in tracked files within a Git repository.
+#
+# Usage: ./scripts/check_pydantic.sh /path/to/repository
+
+# Check if a path argument is provided
+if [ $# -ne 1 ]; then
+  echo "Usage: $0 /path/to/repository"
+  exit 1
+fi
+
+repository_path="$1"
+
+# Search for lines matching the pattern within the specified repository
+result=$(git -C "$repository_path" grep -E '^import pydantic|^from pydantic')
+
+# Check if any matching lines were found
+if [ -n "$result" ]; then
+  echo "ERROR: The following lines need to be updated:"
+  echo "$result"
+  echo "Please replace the code with an import from langchain_core.pydantic_v1."
+  echo "For example, replace 'from pydantic import BaseModel'"
+  echo "with 'from langchain_core.pydantic_v1 import BaseModel'"
+  exit 1
+fi
diff --git a/libs/partners/box/scripts/lint_imports.sh b/libs/partners/box/scripts/lint_imports.sh
new file mode 100755
index 0000000000000..19ccec1480c01
--- /dev/null
+++ b/libs/partners/box/scripts/lint_imports.sh
@@ -0,0 +1,18 @@
+#!/bin/bash
+
+set -eu
+
+# Initialize a variable to keep track of errors
+errors=0
+
+# make sure not importing from langchain, langchain_experimental, or langchain_community
+git --no-pager grep '^from langchain\.' . && errors=$((errors+1))
+git --no-pager grep '^from langchain_experimental\.' . && errors=$((errors+1))
+git --no-pager grep '^from langchain_community\.' .
&& errors=$((errors+1)) + +# Decide on an exit status based on the errors +if [ "$errors" -gt 0 ]; then + exit 1 +else + exit 0 +fi diff --git a/libs/partners/box/tests/__init__.py b/libs/partners/box/tests/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/libs/partners/box/tests/integration_tests/__init__.py b/libs/partners/box/tests/integration_tests/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/libs/partners/box/tests/integration_tests/document_loaders/__init__.py b/libs/partners/box/tests/integration_tests/document_loaders/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/libs/partners/box/tests/integration_tests/document_loaders/test_box_file_loader.py b/libs/partners/box/tests/integration_tests/document_loaders/test_box_file_loader.py new file mode 100644 index 0000000000000..0146b60dcf615 --- /dev/null +++ b/libs/partners/box/tests/integration_tests/document_loaders/test_box_file_loader.py @@ -0,0 +1,42 @@ +from langchain_core.documents import Document +from pytest_mock import MockerFixture + +from langchain_box.document_loaders import BoxLoader + + +# test Document retrieval +def test_file_load(mocker: MockerFixture) -> None: + mocker.patch( + "langchain_box.utilities.BoxAPIWrapper.get_document_by_file_id", return_value=[] + ) + + loader = BoxLoader( # type: ignore[call-arg] + box_developer_token="box_developer_token", + box_file_ids=["box_file_ids"], + ) + + documents = loader.load() + assert documents + + mocker.patch( + "langchain_box.utilities.BoxAPIWrapper.get_document_by_file_id", + return_value=( + Document( + page_content="Test file mode\ndocument contents", + metadata={"title": "Testing Files"}, + ) + ), + ) + + loader = BoxLoader( # type: ignore[call-arg] + box_developer_token="box_developer_token", + box_file_ids=["box_file_ids"], + ) + + documents = loader.load() + assert documents == [ + Document( + page_content="Test file mode\ndocument contents", + metadata={"title": "Testing Files"}, + ) + ] diff --git a/libs/partners/box/tests/integration_tests/test_compile.py b/libs/partners/box/tests/integration_tests/test_compile.py new file mode 100644 index 0000000000000..5196f4c0f8c51 --- /dev/null +++ b/libs/partners/box/tests/integration_tests/test_compile.py @@ -0,0 +1,7 @@ +import pytest # type: ignore[import-not-found] + + +@pytest.mark.compile +def test_placeholder() -> None: + """Used for compiling integration tests without running any real tests.""" + pass diff --git a/libs/partners/box/tests/integration_tests/utilities/__init__.py b/libs/partners/box/tests/integration_tests/utilities/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/libs/partners/box/tests/integration_tests/utilities/test_box_util.py b/libs/partners/box/tests/integration_tests/utilities/test_box_util.py new file mode 100644 index 0000000000000..6403b2f54105e --- /dev/null +++ b/libs/partners/box/tests/integration_tests/utilities/test_box_util.py @@ -0,0 +1,47 @@ +from unittest.mock import Mock + +import pytest +from langchain_core.documents import Document +from pytest_mock import MockerFixture + +from langchain_box.utilities import BoxAPIWrapper + + +@pytest.fixture() +def mock_worker(mocker: MockerFixture) -> None: + mocker.patch("langchain_box.utilities.BoxAuth.authorize", return_value=Mock()) + mocker.patch("langchain_box.utilities.BoxAuth.get_client", return_value=Mock()) + mocker.patch( + "langchain_box.utilities.BoxAPIWrapper.get_text_representation", + 
return_value=("filename", "content", "url"), + ) + + +def test_get_documents_by_file_ids(mock_worker, mocker: MockerFixture) -> None: # type: ignore[no-untyped-def] + mocker.patch( + "langchain_box.utilities.BoxAPIWrapper.get_document_by_file_id", + return_value=( + Document( + page_content="content", metadata={"source": "url", "title": "filename"} + ) + ), + ) + + box = BoxAPIWrapper(box_developer_token="box_developer_token") # type: ignore[call-arg] + + documents = box.get_document_by_file_id("box_file_id") + assert documents == Document( + page_content="content", metadata={"source": "url", "title": "filename"} + ) + + +def test_get_documents_by_folder_id(mock_worker, mocker: MockerFixture) -> None: # type: ignore[no-untyped-def] + mocker.patch( + "langchain_box.utilities.BoxAPIWrapper.get_folder_items", + return_value=([{"id": "file_id", "type": "file"}]), + ) + + box = BoxAPIWrapper(box_developer_token="box_developer_token") # type: ignore[call-arg] + + folder_contents = box.get_folder_items("box_folder_id") + assert folder_contents == [{"id": "file_id", "type": "file"}] diff --git a/libs/partners/box/tests/unit_tests/__init__.py b/libs/partners/box/tests/unit_tests/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/libs/partners/box/tests/unit_tests/document_loaders/__init__.py b/libs/partners/box/tests/unit_tests/document_loaders/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/libs/partners/box/tests/unit_tests/document_loaders/test_box_loader.py b/libs/partners/box/tests/unit_tests/document_loaders/test_box_loader.py new file mode 100644 index 0000000000000..96e46f15e8be6 --- /dev/null +++ b/libs/partners/box/tests/unit_tests/document_loaders/test_box_loader.py @@ -0,0 +1,58 @@ +import pytest + +from langchain_box.document_loaders import BoxLoader +from langchain_box.utilities import BoxAuth, BoxAuthType + + +# Test auth types +def test_direct_token_initialization() -> None: + loader = BoxLoader( # type: ignore[call-arg] + box_developer_token="box_developer_token", + box_file_ids=["box_file_ids"], + ) + + assert loader.box_developer_token == "box_developer_token" + assert loader.box_file_ids == ["box_file_ids"] + + +def test_failed_direct_token_initialization() -> None: + with pytest.raises(ValueError): + loader = BoxLoader(box_file_ids=["box_file_ids"]) # type: ignore[call-arg] # noqa: F841 + + +def test_auth_initialization() -> None: + auth = BoxAuth( + auth_type=BoxAuthType.TOKEN, box_developer_token="box_developer_token" + ) + + loader = BoxLoader( # type: ignore[call-arg] + box_auth=auth, + box_file_ids=["box_file_ids"], + ) + + assert loader.box_file_ids == ["box_file_ids"] + + +# test loaders +def test_failed_file_initialization() -> None: + with pytest.raises(ValueError): + loader = BoxLoader(box_developer_token="box_developer_token") # type: ignore[call-arg] # noqa: F841 + + +def test_folder_initialization() -> None: + loader = BoxLoader( # type: ignore[call-arg] + box_developer_token="box_developer_token", + box_folder_id="box_folder_id", + ) + + assert loader.box_developer_token == "box_developer_token" + assert loader.box_folder_id == "box_folder_id" + + +def test_failed_initialization_files_and_folders() -> None: + with pytest.raises(ValueError): + loader = BoxLoader( # type: ignore[call-arg] # noqa: F841 + box_developer_token="box_developer_token", + box_folder_id="box_folder_id", + box_file_ids=["box_file_ids"], + ) diff --git a/libs/partners/box/tests/unit_tests/test_imports.py 
b/libs/partners/box/tests/unit_tests/test_imports.py new file mode 100644 index 0000000000000..83507ee1545c8 --- /dev/null +++ b/libs/partners/box/tests/unit_tests/test_imports.py @@ -0,0 +1,13 @@ +from langchain_box import __all__ + +EXPECTED_ALL = [ + "BoxLoader", + "BoxAuth", + "BoxAuthType", + "BoxAPIWrapper", + "__version__", +] + + +def test_all_imports() -> None: + assert sorted(EXPECTED_ALL) == sorted(__all__) diff --git a/libs/partners/box/tests/unit_tests/utilities/__init__.py b/libs/partners/box/tests/unit_tests/utilities/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/libs/partners/box/tests/unit_tests/utilities/test_box_util.py b/libs/partners/box/tests/unit_tests/utilities/test_box_util.py new file mode 100644 index 0000000000000..ec011f0032087 --- /dev/null +++ b/libs/partners/box/tests/unit_tests/utilities/test_box_util.py @@ -0,0 +1,101 @@ +import pytest +from pydantic.v1.error_wrappers import ValidationError + +from langchain_box.utilities import BoxAPIWrapper, BoxAuth, BoxAuthType + + +# Test auth types +def test_token_initialization() -> None: + auth = BoxAuth( + auth_type=BoxAuthType.TOKEN, box_developer_token="box_developer_token" + ) + + assert auth.auth_type == "token" + assert auth.box_developer_token == "box_developer_token" + + +def test_failed_token_initialization() -> None: + with pytest.raises(ValidationError): + auth = BoxAuth(auth_type=BoxAuthType.TOKEN) # noqa: F841 + + +def test_jwt_eid_initialization() -> None: + auth = BoxAuth(auth_type=BoxAuthType.JWT, box_jwt_path="box_jwt_path") + + assert auth.auth_type == "jwt" + assert auth.box_jwt_path == "box_jwt_path" + + +def test_jwt_user_initialization() -> None: + auth = BoxAuth( + auth_type=BoxAuthType.JWT, + box_jwt_path="box_jwt_path", + box_user_id="box_user_id", + ) + + assert auth.auth_type == "jwt" + assert auth.box_jwt_path == "box_jwt_path" + assert auth.box_user_id == "box_user_id" + + +def test_failed_jwt_initialization() -> None: + with pytest.raises(ValidationError): + auth = BoxAuth(auth_type=BoxAuthType.JWT, box_user_id="box_user_id") # noqa: F841 + + +def test_ccg_eid_initialization() -> None: + auth = BoxAuth( + auth_type=BoxAuthType.CCG, + box_client_id="box_client_id", + box_client_secret="box_client_secret", + box_enterprise_id="box_enterprise_id", + ) + + assert auth.auth_type == "ccg" + assert auth.box_client_id == "box_client_id" + assert auth.box_client_secret == "box_client_secret" + assert auth.box_enterprise_id == "box_enterprise_id" + + +def test_ccg_user_initialization() -> None: + auth = BoxAuth( + auth_type=BoxAuthType.CCG, + box_client_id="box_client_id", + box_client_secret="box_client_secret", + box_enterprise_id="box_enterprise_id", + box_user_id="box_user_id", + ) + + assert auth.auth_type == "ccg" + assert auth.box_client_id == "box_client_id" + assert auth.box_client_secret == "box_client_secret" + assert auth.box_enterprise_id == "box_enterprise_id" + assert auth.box_user_id == "box_user_id" + + +def test_failed_ccg_initialization() -> None: + with pytest.raises(ValidationError): + auth = BoxAuth(auth_type=BoxAuthType.CCG) # noqa: F841 + + +def test_direct_token_initialization() -> None: + box = BoxAPIWrapper( # type: ignore[call-arg] + box_developer_token="box_developer_token" + ) + + assert box.box_developer_token == "box_developer_token" + + +def test_auth_initialization() -> None: + auth = BoxAuth( + auth_type=BoxAuthType.TOKEN, box_developer_token="box_developer_token" + ) + + box = BoxAPIWrapper(box_auth=auth) # type: 
ignore[call-arg] # noqa: F841 + + assert auth.box_developer_token == "box_developer_token" + + +def test_failed_initialization_no_auth() -> None: + with pytest.raises(ValidationError): + box = BoxAPIWrapper() # type: ignore[call-arg] # noqa: F841 diff --git a/yarn.lock b/yarn.lock new file mode 100644 index 0000000000000..fb57ccd13afbd --- /dev/null +++ b/yarn.lock @@ -0,0 +1,4 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. +# yarn lockfile v1 + + From e7d6b25653da5adf00500d169c6822fd483b4fa7 Mon Sep 17 00:00:00 2001 From: wangda <38549158+daziz@users.noreply.github.com> Date: Wed, 21 Aug 2024 20:49:12 +0800 Subject: [PATCH 33/80] docs:Correcting spelling mistakes (#25612) --- docs/docs/integrations/providers/marqo.md | 2 +- docs/docs/integrations/providers/premai.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/docs/integrations/providers/marqo.md b/docs/docs/integrations/providers/marqo.md index 106db08599dcf..1e18569309ee2 100644 --- a/docs/docs/integrations/providers/marqo.md +++ b/docs/docs/integrations/providers/marqo.md @@ -21,7 +21,7 @@ To run Marqo locally with our docker image, [see our getting started.](https://d There exists a wrapper around Marqo indexes, allowing you to use them within the vectorstore framework. Marqo lets you select from a range of models for generating embeddings and exposes some preprocessing configurations. -The Marqo vectorstore can also work with existing multimodel indexes where your documents have a mix of images and text, for more information refer to [our documentation](https://docs.marqo.ai/latest/#multi-modal-and-cross-modal-search). Note that instaniating the Marqo vectorstore with an existing multimodal index will disable the ability to add any new documents to it via the langchain vectorstore `add_texts` method. +The Marqo vectorstore can also work with existing multimodal indexes where your documents have a mix of images and text, for more information refer to [our documentation](https://docs.marqo.ai/latest/#multi-modal-and-cross-modal-search). Note that instantiating the Marqo vectorstore with an existing multimodal index will disable the ability to add any new documents to it via the langchain vectorstore `add_texts` method. To import this vectorstore: ```python diff --git a/docs/docs/integrations/providers/premai.md b/docs/docs/integrations/providers/premai.md index 7bf88d1fd0208..4dc66d808812e 100644 --- a/docs/docs/integrations/providers/premai.md +++ b/docs/docs/integrations/providers/premai.md @@ -319,7 +319,7 @@ def multiply(a: int, b: int) -> int: ### Binding tool schemas with our LLM -We will now use the `bind_tools` method to convert our above functions to a "tool" and binding it with the model. This means we are going to pass these tool informations everytime we invoke the model. +We will now use the `bind_tools` method to convert our above functions to a "tool" and binding it with the model. This means we are going to pass these tool information everytime we invoke the model. ```python tools = [add, multiply] From 34d0417eb5e1e98a774ee00ec628d016aacfa456 Mon Sep 17 00:00:00 2001 From: ZhangShenao <15201440436@163.com> Date: Wed, 21 Aug 2024 20:58:00 +0800 Subject: [PATCH 34/80] Improvement[Doc] Improve api doc in of `PineconeVectorStore` (#25605) Complete missing arguments in api doc of `PineconeVectorStore`. 
--- .../pinecone/langchain_pinecone/vectorstores.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/libs/partners/pinecone/langchain_pinecone/vectorstores.py b/libs/partners/pinecone/langchain_pinecone/vectorstores.py index d1cadfcf217d1..c9adb085ece0d 100644 --- a/libs/partners/pinecone/langchain_pinecone/vectorstores.py +++ b/libs/partners/pinecone/langchain_pinecone/vectorstores.py @@ -249,6 +249,7 @@ def add_texts( namespace: Optional pinecone namespace to add the texts to. batch_size: Batch size to use when adding the texts to the vectorstore. embedding_chunk_size: Chunk size to use when embedding the texts. +            async_req: Whether to run asynchronously. id_prefix: Optional string to use as an ID prefix when upserting vectors.  Returns: @@ -427,6 +428,9 @@ def max_marginal_relevance_search_by_vector( of diversity among the results with 0 corresponding to maximum diversity and 1 to minimum diversity. Defaults to 0.5. +            filter: Dictionary of argument(s) to filter on metadata. +            namespace: Namespace to search in. Default will search in '' namespace. + Returns: List of Documents selected by maximal marginal relevance. """ @@ -475,6 +479,9 @@ def max_marginal_relevance_search( of diversity among the results with 0 corresponding to maximum diversity and 1 to minimum diversity. Defaults to 0.5. +            filter: Dictionary of argument(s) to filter on metadata. +            namespace: Namespace to search in. Default will search in '' namespace. + Returns: List of Documents selected by maximal marginal relevance. """ @@ -496,6 +503,7 @@ def get_pinecone_index( Args: index_name: Name of the index to use. pool_threads: Number of threads to use for index upsert. +        pinecone_api_key: The Pinecone API key. Returns: Pinecone Index instance.""" _pinecone_api_key = pinecone_api_key or os.environ.get("PINECONE_API_KEY") or "" @@ -541,7 +549,7 @@ def from_texts( ) -> PineconeVectorStore: """Construct Pinecone wrapper from raw documents. -        This is a user friendly interface that: +        This is a user-friendly interface that: 1. Embeds documents. 2. Adds the documents to a provided Pinecone index @@ -606,7 +614,9 @@ def delete( """Delete by vector IDs or filter. Args: ids: List of ids to delete. +            delete_all: Whether to delete all vectors in the index. filter: Dictionary of conditions to filter vectors to delete. +            namespace: Namespace to search in. Default will search in '' namespace.
""" if namespace is None: From b002702af6103f3da31662c7e4171d9b383bdb50 Mon Sep 17 00:00:00 2001 From: Dristy Srivastava <58721149+dristysrivastava@users.noreply.github.com> Date: Wed, 21 Aug 2024 18:40:14 +0530 Subject: [PATCH 35/80] [Community][minor]: Updating metadata with full_path in SharePoint loader (#25593) - **Description:** Updating metadata for sharepoint loader with full path i.e., webUrl - **Issue:** NA - **Dependencies:** NA - **Tests:** NA - **Docs** NA Co-authored-by: dristy.cd Co-authored-by: ccurme --- .../langchain_community/document_loaders/sharepoint.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/libs/community/langchain_community/document_loaders/sharepoint.py b/libs/community/langchain_community/document_loaders/sharepoint.py index 41ccf0147e680..e589a58447c4d 100644 --- a/libs/community/langchain_community/document_loaders/sharepoint.py +++ b/libs/community/langchain_community/document_loaders/sharepoint.py @@ -78,6 +78,7 @@ def lazy_load(self) -> Iterator[Document]: auth_identities = self.authorized_identities(file_id) if self.load_extended_metadata is True: extended_metadata = self.get_extended_metadata(file_id) + extended_metadata.update({"source_full_url": target_folder.web_url}) for parsed_blob in blob_parser.lazy_parse(blob): if self.load_auth is True: parsed_blob.metadata["authorized_identities"] = auth_identities @@ -94,6 +95,7 @@ def lazy_load(self) -> Iterator[Document]: auth_identities = self.authorized_identities(file_id) if self.load_extended_metadata is True: extended_metadata = self.get_extended_metadata(file_id) + extended_metadata.update({"source_full_url": target_folder.web_url}) for parsed_blob in blob_parser.lazy_parse(blob): if self.load_auth is True: parsed_blob.metadata["authorized_identities"] = auth_identities @@ -130,6 +132,9 @@ def lazy_load(self) -> Iterator[Document]: blob_part.metadata["authorized_identities"] = auth_identities if self.load_extended_metadata is True: blob_part.metadata.update(extended_metadata) + blob_part.metadata.update( + {"source_full_url": target_folder.web_url} + ) yield blob_part def authorized_identities(self, file_id: str) -> List: From d457d7d121028f59a20d03a6c200ec80f501400f Mon Sep 17 00:00:00 2001 From: Mikhail Khludnev Date: Wed, 21 Aug 2024 16:45:00 +0300 Subject: [PATCH 36/80] docs: Update qdrant.ipynb "BM25".lower() (#25616) Otherwise I've got KeyError from `fastembeds` --- docs/docs/integrations/vectorstores/qdrant.ipynb | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/docs/integrations/vectorstores/qdrant.ipynb b/docs/docs/integrations/vectorstores/qdrant.ipynb index 42b456b5a979f..1c1731d46076e 100644 --- a/docs/docs/integrations/vectorstores/qdrant.ipynb +++ b/docs/docs/integrations/vectorstores/qdrant.ipynb @@ -532,7 +532,7 @@ "source": [ "from langchain_qdrant import FastEmbedSparse, RetrievalMode\n", "\n", - "sparse_embeddings = FastEmbedSparse(model_name=\"Qdrant/BM25\")\n", + "sparse_embeddings = FastEmbedSparse(model_name=\"Qdrant/bm25\")\n", "\n", "qdrant = QdrantVectorStore.from_documents(\n", " docs,\n", @@ -571,7 +571,7 @@ "source": [ "from langchain_qdrant import FastEmbedSparse, RetrievalMode\n", "\n", - "sparse_embeddings = FastEmbedSparse(model_name=\"Qdrant/BM25\")\n", + "sparse_embeddings = FastEmbedSparse(model_name=\"Qdrant/bm25\")\n", "\n", "qdrant = QdrantVectorStore.from_documents(\n", " docs,\n", From 10a2ce2a26fc99b4c45d5e07c3681f174f81cb35 Mon Sep 17 00:00:00 2001 From: ccurme Date: Wed, 21 Aug 2024 10:26:25 -0400 Subject: [PATCH 37/80] 
together[patch]: use mixtral in standard integration tests (#25619) Mistral 7B occasionally fails tool-calling tests. Updating to Mixtral appears to improve this. --- .../tests/integration_tests/test_chat_models_standard.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libs/partners/together/tests/integration_tests/test_chat_models_standard.py b/libs/partners/together/tests/integration_tests/test_chat_models_standard.py index 18c167f8a91dc..f4c8cc7002de6 100644 --- a/libs/partners/together/tests/integration_tests/test_chat_models_standard.py +++ b/libs/partners/together/tests/integration_tests/test_chat_models_standard.py @@ -24,7 +24,7 @@ def chat_model_class(self) -> Type[BaseChatModel]: @property def chat_model_params(self) -> dict: return { - "model": "mistralai/Mistral-7B-Instruct-v0.1", + "model": "mistralai/Mixtral-8x7B-Instruct-v0.1", "rate_limiter": rate_limiter, } From a78843bb775210b5051dae7ec0a532cbe785ce21 Mon Sep 17 00:00:00 2001 From: Bagatur <22008038+baskaryan@users.noreply.github.com> Date: Wed, 21 Aug 2024 08:12:42 -0700 Subject: [PATCH 38/80] docs: how to use langsmith few shot (#25601) Requires langsmith 0.1.101 release --- .../how_to/example_selectors_langsmith.ipynb | 320 ++++++++++++++++++ docs/docs/how_to/index.mdx | 1 + 2 files changed, 321 insertions(+) create mode 100644 docs/docs/how_to/example_selectors_langsmith.ipynb diff --git a/docs/docs/how_to/example_selectors_langsmith.ipynb b/docs/docs/how_to/example_selectors_langsmith.ipynb new file mode 100644 index 0000000000000..7ca5d136876b6 --- /dev/null +++ b/docs/docs/how_to/example_selectors_langsmith.ipynb @@ -0,0 +1,320 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "4f7e423b", + "metadata": {}, + "source": [ + "# How to select examples from a LangSmith dataset\n", + "\n", + "import Prerequisites from \"@theme/Prerequisites\";\n", + "import Compatibility from \"@theme/Compatibility\";\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "LangSmith datasets have built-in support for similarity search, making them a great tool for building and querying few-shot examples.\n", + "\n", + "In this guide we'll see how to use an indexed LangSmith dataset as a few-shot example selector.\n", + "\n", + "## Setup\n", + "\n", + "Before getting started make sure you've [created a LangSmith account](https://smith.langchain.com/) and set your credentials:" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "85445e0e", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Set LangSmith API key:\n", + "\n", + "········\n" + ] + } + ], + "source": [ + "import getpass\n", + "import os\n", + "\n", + "if not os.environ.get(\"LANGSMITH_API_KEY\"):\n", + " os.environ[\"LANGSMITH_API_KEY\"] = getpass.getpass(\"Set LangSmith API key:\\n\\n\")\n", + "\n", + "os.environ[\"LANGSMITH_TRACING\"] = \"true\"" + ] + }, + { + "cell_type": "markdown", + "id": "ca899e29", + "metadata": {}, + "source": [ + "We'll need to install the `langsmith` SDK. In this example we'll also make use of `langchain`, `langchain-openai`, and `langchain-benchmarks`:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b4fa7810", + "metadata": {}, + "outputs": [], + "source": [ + "%pip install -qU langsmith>=0.1.101 langchain langchain-openai langchain-benchmarks" + ] + }, + { + "cell_type": "markdown", + "id": "fc716e12", + "metadata": {}, + "source": [ + "Now we'll clone a public dataset and turn on indexing for the dataset. 
We can also turn on indexing via the [LangSmith UI](https://docs.smith.langchain.com/how_to_guides/datasets/index_datasets_for_dynamic_few_shot_example_selection).\n", + "\n", + "We'll clone the [Multiverse math few shot example dataset](https://blog.langchain.dev/few-shot-prompting-to-improve-tool-calling-performance/).\n", + "\n", + "This enables searching over the dataset, and will make sure that anytime we update/add examples they are also indexed." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "cf53d280", + "metadata": {}, + "outputs": [], + "source": [ + "from langsmith import AsyncClient as AsyncLangSmith\n", + "from langsmith import Client as LangSmith\n", + "\n", + "ls_client = LangSmith()\n", + "async_ls_client = AsyncLangSmith()\n", + "\n", + "dataset_name = \"multiverse-math-examples-for-few-shot\"\n", + "dataset_public_url = (\n", + " \"https://smith.langchain.com/public/0df59e49-d226-4ef2-9ecd-8c0fc9cd0288/d\"\n", + ")\n", + "\n", + "ls_client.clone_public_dataset(dataset_public_url)\n", + "\n", + "dataset_id = ls_client.read_dataset(dataset_name=dataset_name).id\n", + "ls_client.index_dataset(dataset_id=dataset_id)" + ] + }, + { + "cell_type": "markdown", + "id": "5767d171", + "metadata": {}, + "source": [ + "Indexing can take a few seconds. Once the dataset is indexed, we can search for similar examples like so:" + ] + }, + { + "cell_type": "code", + "execution_count": 29, + "id": "5013a56f", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "3" + ] + }, + "execution_count": 29, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "examples = ls_client.similar_examples(\n", + " {\"input\": \"whats the negation of the negation of the negation of 3\"},\n", + " limit=3,\n", + " dataset_id=dataset_id,\n", + ")\n", + "len(examples)" + ] + }, + { + "cell_type": "code", + "execution_count": 34, + "id": "a142db06", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'evaluate the negation of -100'" + ] + }, + "execution_count": 34, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "examples[0].inputs[\"input\"]" + ] + }, + { + "cell_type": "markdown", + "id": "d2627125", + "metadata": {}, + "source": [ + "For this dataset the outputs are an entire chat history:" + ] + }, + { + "cell_type": "code", + "execution_count": 33, + "id": "af5b9191", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "9" + ] + }, + "execution_count": 33, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "len(examples[1].outputs[\"output\"])" + ] + }, + { + "cell_type": "markdown", + "id": "e852c8ef", + "metadata": {}, + "source": [ + "The search returns the examples whose inputs are most similar to the query input. 
We can use this for few-shot prompting a model like so:" + ] + }, + { + "cell_type": "code", + "execution_count": 50, + "id": "12cba1e1", + "metadata": {}, + "outputs": [], + "source": [ + "from langchain.chat_models import init_chat_model\n", + "from langchain_benchmarks.tool_usage.tasks.multiverse_math import (\n", + " add,\n", + " cos,\n", + " divide,\n", + " log,\n", + " multiply,\n", + " negate,\n", + " pi,\n", + " power,\n", + " sin,\n", + " subtract,\n", + ")\n", + "from langchain_core.messages import HumanMessage, SystemMessage, convert_to_messages\n", + "from langchain_core.runnables import RunnableLambda\n", + "\n", + "\n", + "def similar_examples(input_: dict) -> dict:\n", + " examples = ls_client.similar_examples(input_, limit=5, dataset_id=dataset_id)\n", + " return {**input_, \"examples\": examples}\n", + "\n", + "\n", + "async def asimilar_examples(input_: dict) -> dict:\n", + " examples = await async_ls_client.similar_examples(\n", + " input_, limit=5, dataset_id=dataset_id\n", + " )\n", + " return {**input_, \"examples\": examples}\n", + "\n", + "\n", + "def construct_prompt(input_: dict) -> list:\n", + " instructions = \"\"\"You are great at using mathematical tools.\"\"\"\n", + " messages = []\n", + " for ex in input_[\"examples\"]:\n", + " # For this dataset, a multi-turn conversation is stored as output.\n", + " messages.extend(convert_to_messages(ex.outputs[\"output\"]))\n", + " examples = [msg for msg in messages if not isinstance(msg, SystemMessage)]\n", + " for ex in examples:\n", + " ex.name = (\n", + " \"example_user\" if isinstance(ex, HumanMessage) else \"example_assistant\"\n", + " )\n", + " return [SystemMessage(instructions), *examples, HumanMessage(input_[\"input\"])]\n", + "\n", + "\n", + "tools = [add, cos, divide, log, multiply, negate, pi, power, sin, subtract]\n", + "llm = init_chat_model(\"gpt-4o\")\n", + "llm_with_tools = llm.bind_tools(tools)\n", + "\n", + "example_selector = RunnableLambda(func=similar_examples, afunc=asimilar_examples)\n", + "\n", + "chain = example_selector | construct_prompt | llm_with_tools" + ] + }, + { + "cell_type": "code", + "execution_count": 52, + "id": "c423b367", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[{'name': 'negate',\n", + " 'args': {'a': 3},\n", + " 'id': 'call_ehmx3Z4Cj6HFpI8FV4pYZ5Oo',\n", + " 'type': 'tool_call'}]" + ] + }, + "execution_count": 52, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "ai_msg = await chain.ainvoke({\"input\": \"whats the negation of the negation of 3\"})\n", + "ai_msg.tool_calls" + ] + }, + { + "cell_type": "markdown", + "id": "94489b4a", + "metadata": {}, + "source": [ + "Looking at the LangSmith trace, we can see that relevant examples were pulled in in the `similar_examples` step and passed as messages to ChatOpenAI: https://smith.langchain.com/public/05af2ce8-1a45-4f3a-8d54-6524ff919279/r." 
+ ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "poetry-venv-311", + "language": "python", + "name": "poetry-venv-311" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/docs/how_to/index.mdx b/docs/docs/how_to/index.mdx index 05f90a7d24dbd..b7ab308dff4de 100644 --- a/docs/docs/how_to/index.mdx +++ b/docs/docs/how_to/index.mdx @@ -69,6 +69,7 @@ These are the core building blocks you can use when building applications. - [How to: select examples by semantic similarity](/docs/how_to/example_selectors_similarity) - [How to: select examples by semantic ngram overlap](/docs/how_to/example_selectors_ngram) - [How to: select examples by maximal marginal relevance](/docs/how_to/example_selectors_mmr) +- [How to: select examples from LangSmith few-shot datasets](/docs/how_to/example_selectors_langsmith/) ### Chat models From 0bc3845e1e0c4e9107520ae8c52b39f9d7bcef7e Mon Sep 17 00:00:00 2001 From: Bagatur <22008038+baskaryan@users.noreply.github.com> Date: Wed, 21 Aug 2024 09:13:15 -0700 Subject: [PATCH 39/80] core[patch]: support oai dicts as messages (#25621) and update langsmtih example selector docs --- .../how_to/example_selectors_langsmith.ipynb | 102 +++++++++++------- libs/core/langchain_core/messages/utils.py | 22 +++- .../output_parsers/openai_tools.py | 8 +- 3 files changed, 87 insertions(+), 45 deletions(-) diff --git a/docs/docs/how_to/example_selectors_langsmith.ipynb b/docs/docs/how_to/example_selectors_langsmith.ipynb index 7ca5d136876b6..d8e7a90546594 100644 --- a/docs/docs/how_to/example_selectors_langsmith.ipynb +++ b/docs/docs/how_to/example_selectors_langsmith.ipynb @@ -18,7 +18,7 @@ "\n", "\n", "\n", "\n", "\n", @@ -33,7 +33,7 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": 2, "id": "85445e0e", "metadata": {}, "outputs": [ @@ -72,7 +72,7 @@ "metadata": {}, "outputs": [], "source": [ - "%pip install -qU langsmith>=0.1.101 langchain langchain-openai langchain-benchmarks" + "%pip install -qU langsmith>=0.1.100 langchain langchain-openai langchain-benchmarks" ] }, { @@ -84,7 +84,7 @@ "\n", "We'll clone the [Multiverse math few shot example dataset](https://blog.langchain.dev/few-shot-prompting-to-improve-tool-calling-performance/).\n", "\n", - "This enables searching over the dataset, and will make sure that anytime we update/add examples they are also indexed." + "This enables searching over the dataset and will make sure that anytime we update/add examples they are also indexed." 
] }, { @@ -94,20 +94,19 @@ "metadata": {}, "outputs": [], "source": [ - "from langsmith import AsyncClient as AsyncLangSmith\n", "from langsmith import Client as LangSmith\n", "\n", "ls_client = LangSmith()\n", - "async_ls_client = AsyncLangSmith()\n", "\n", - "dataset_name = \"multiverse-math-examples-for-few-shot\"\n", + "dataset_name = \"multiverse-math-few-shot-examples-v2\"\n", "dataset_public_url = (\n", - " \"https://smith.langchain.com/public/0df59e49-d226-4ef2-9ecd-8c0fc9cd0288/d\"\n", + " \"https://smith.langchain.com/public/620596ee-570b-4d2b-8c8f-f828adbe5242/d\"\n", ")\n", "\n", "ls_client.clone_public_dataset(dataset_public_url)\n", "\n", "dataset_id = ls_client.read_dataset(dataset_name=dataset_name).id\n", + "\n", "ls_client.index_dataset(dataset_id=dataset_id)" ] }, @@ -116,12 +115,12 @@ "id": "5767d171", "metadata": {}, "source": [ - "Indexing can take a few seconds. Once the dataset is indexed, we can search for similar examples like so:" + "Indexing can take a few seconds. Once the dataset is indexed, we can search for similar examples. Note that the input to the `similar_examples` method must have the same schema as the examples inputs. In this case our example inputs are a dictionary with a \"question\" key:" ] }, { "cell_type": "code", - "execution_count": 29, + "execution_count": 12, "id": "5013a56f", "metadata": {}, "outputs": [ @@ -131,14 +130,14 @@ "3" ] }, - "execution_count": 29, + "execution_count": 12, "metadata": {}, "output_type": "execute_result" } ], "source": [ "examples = ls_client.similar_examples(\n", - " {\"input\": \"whats the negation of the negation of the negation of 3\"},\n", + " {\"question\": \"whats the negation of the negation of the negation of 3\"},\n", " limit=3,\n", " dataset_id=dataset_id,\n", ")\n", @@ -147,7 +146,7 @@ }, { "cell_type": "code", - "execution_count": 34, + "execution_count": 13, "id": "a142db06", "metadata": {}, "outputs": [ @@ -157,13 +156,13 @@ "'evaluate the negation of -100'" ] }, - "execution_count": 34, + "execution_count": 13, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "examples[0].inputs[\"input\"]" + "examples[0].inputs[\"question\"]" ] }, { @@ -171,28 +170,51 @@ "id": "d2627125", "metadata": {}, "source": [ - "For this dataset the outputs are an entire chat history:" + "For this dataset, the outputs are the conversation that followed the question in OpenAI message format:" ] }, { "cell_type": "code", - "execution_count": 33, + "execution_count": 14, "id": "af5b9191", "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "9" + "[{'role': 'assistant',\n", + " 'content': None,\n", + " 'tool_calls': [{'id': 'toolu_01HTpq4cYNUac6F7omUc2Wz3',\n", + " 'type': 'function',\n", + " 'function': {'name': 'negate', 'arguments': '{\"a\": -100}'}}]},\n", + " {'role': 'tool',\n", + " 'content': '-100.0',\n", + " 'tool_call_id': 'toolu_01HTpq4cYNUac6F7omUc2Wz3'},\n", + " {'role': 'assistant', 'content': 'So the answer is 100.'},\n", + " {'role': 'user',\n", + " 'content': '100 is incorrect. Please refer to the output of your tool call.'},\n", + " {'role': 'assistant',\n", + " 'content': [{'text': \"You're right, my previous answer was incorrect. 
Let me re-evaluate using the tool output:\",\n", + " 'type': 'text'}],\n", + " 'tool_calls': [{'id': 'toolu_01XsJQboYghGDygQpPjJkeRq',\n", + " 'type': 'function',\n", + " 'function': {'name': 'negate', 'arguments': '{\"a\": -100}'}}]},\n", + " {'role': 'tool',\n", + " 'content': '-100.0',\n", + " 'tool_call_id': 'toolu_01XsJQboYghGDygQpPjJkeRq'},\n", + " {'role': 'assistant', 'content': 'The answer is -100.0'},\n", + " {'role': 'user',\n", + " 'content': 'You have the correct numerical answer but are returning additional text. Please only respond with the numerical answer.'},\n", + " {'role': 'assistant', 'content': '-100.0'}]" ] }, - "execution_count": 33, + "execution_count": 14, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "len(examples[1].outputs[\"output\"])" + "examples[0].outputs[\"conversation\"]" ] }, { @@ -205,7 +227,7 @@ }, { "cell_type": "code", - "execution_count": 50, + "execution_count": 20, "id": "12cba1e1", "metadata": {}, "outputs": [], @@ -223,8 +245,10 @@ " sin,\n", " subtract,\n", ")\n", - "from langchain_core.messages import HumanMessage, SystemMessage, convert_to_messages\n", "from langchain_core.runnables import RunnableLambda\n", + "from langsmith import AsyncClient as AsyncLangSmith\n", + "\n", + "async_ls_client = AsyncLangSmith()\n", "\n", "\n", "def similar_examples(input_: dict) -> dict:\n", @@ -241,20 +265,24 @@ "\n", "def construct_prompt(input_: dict) -> list:\n", " instructions = \"\"\"You are great at using mathematical tools.\"\"\"\n", - " messages = []\n", + " examples = []\n", " for ex in input_[\"examples\"]:\n", - " # For this dataset, a multi-turn conversation is stored as output.\n", - " messages.extend(convert_to_messages(ex.outputs[\"output\"]))\n", - " examples = [msg for msg in messages if not isinstance(msg, SystemMessage)]\n", - " for ex in examples:\n", - " ex.name = (\n", - " \"example_user\" if isinstance(ex, HumanMessage) else \"example_assistant\"\n", - " )\n", - " return [SystemMessage(instructions), *examples, HumanMessage(input_[\"input\"])]\n", + " examples.append({\"role\": \"user\", \"content\": ex.inputs[\"question\"]})\n", + " for msg in ex.outputs[\"conversation\"]:\n", + " if msg[\"role\"] == \"assistant\":\n", + " msg[\"name\"] = \"example_assistant\"\n", + " if msg[\"role\"] == \"user\":\n", + " msg[\"name\"] = \"example_user\"\n", + " examples.append(msg)\n", + " return [\n", + " {\"role\": \"system\", \"content\": instructions},\n", + " *examples,\n", + " {\"role\": \"user\", \"content\": input_[\"question\"]},\n", + " ]\n", "\n", "\n", "tools = [add, cos, divide, log, multiply, negate, pi, power, sin, subtract]\n", - "llm = init_chat_model(\"gpt-4o\")\n", + "llm = init_chat_model(\"gpt-4o-2024-08-06\")\n", "llm_with_tools = llm.bind_tools(tools)\n", "\n", "example_selector = RunnableLambda(func=similar_examples, afunc=asimilar_examples)\n", @@ -264,7 +292,7 @@ }, { "cell_type": "code", - "execution_count": 52, + "execution_count": 21, "id": "c423b367", "metadata": {}, "outputs": [ @@ -273,17 +301,17 @@ "text/plain": [ "[{'name': 'negate',\n", " 'args': {'a': 3},\n", - " 'id': 'call_ehmx3Z4Cj6HFpI8FV4pYZ5Oo',\n", + " 'id': 'call_uMSdoTl6ehfHh5a6JQUb2NoZ',\n", " 'type': 'tool_call'}]" ] }, - "execution_count": 52, + "execution_count": 21, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "ai_msg = await chain.ainvoke({\"input\": \"whats the negation of the negation of 3\"})\n", + "ai_msg = await chain.ainvoke({\"question\": \"whats the negation of the negation of 3\"})\n", 
"ai_msg.tool_calls" ] }, @@ -292,7 +320,7 @@ "id": "94489b4a", "metadata": {}, "source": [ - "Looking at the LangSmith trace, we can see that relevant examples were pulled in in the `similar_examples` step and passed as messages to ChatOpenAI: https://smith.langchain.com/public/05af2ce8-1a45-4f3a-8d54-6524ff919279/r." + "Looking at the LangSmith trace, we can see that relevant examples were pulled in in the `similar_examples` step and passed as messages to ChatOpenAI: https://smith.langchain.com/public/9585e30f-765a-4ed9-b964-2211420cd2f8/r." ] } ], diff --git a/libs/core/langchain_core/messages/utils.py b/libs/core/langchain_core/messages/utils.py index 18225c53228df..c7d4d58a149bf 100644 --- a/libs/core/langchain_core/messages/utils.py +++ b/libs/core/langchain_core/messages/utils.py @@ -10,6 +10,7 @@ from __future__ import annotations import inspect +import json from functools import partial from typing import ( TYPE_CHECKING, @@ -213,7 +214,23 @@ def _create_message_from_message_type( if id is not None: kwargs["id"] = id if tool_calls is not None: - kwargs["tool_calls"] = tool_calls + kwargs["tool_calls"] = [] + for tool_call in tool_calls: + # Convert OpenAI-format tool call to LangChain format. + if "function" in tool_call: + args = tool_call["function"]["arguments"] + if isinstance(args, str): + args = json.loads(args, strict=False) + kwargs["tool_calls"].append( + { + "name": tool_call["function"]["name"], + "args": args, + "id": tool_call["id"], + "type": "tool_call", + } + ) + else: + kwargs["tool_calls"].append(tool_call) if message_type in ("human", "user"): message: BaseMessage = HumanMessage(content=content, **kwargs) elif message_type in ("ai", "assistant"): @@ -271,7 +288,8 @@ def _convert_to_message(message: MessageLikeRepresentation) -> BaseMessage: msg_type = msg_kwargs.pop("role") except KeyError: msg_type = msg_kwargs.pop("type") - msg_content = msg_kwargs.pop("content") + # None msg content is not allowed + msg_content = msg_kwargs.pop("content") or "" except KeyError: raise ValueError( f"Message dict must contain 'role' and 'content' keys, got {message}" diff --git a/libs/core/langchain_core/output_parsers/openai_tools.py b/libs/core/langchain_core/output_parsers/openai_tools.py index 11bb5518cbe5d..a20707eb14b07 100644 --- a/libs/core/langchain_core/output_parsers/openai_tools.py +++ b/libs/core/langchain_core/output_parsers/openai_tools.py @@ -5,12 +5,8 @@ from langchain_core.exceptions import OutputParserException from langchain_core.messages import AIMessage, InvalidToolCall -from langchain_core.messages.tool import ( - invalid_tool_call, -) -from langchain_core.messages.tool import ( - tool_call as create_tool_call, -) +from langchain_core.messages.tool import invalid_tool_call +from langchain_core.messages.tool import tool_call as create_tool_call from langchain_core.output_parsers.transform import BaseCumulativeTransformOutputParser from langchain_core.outputs import ChatGeneration, Generation from langchain_core.pydantic_v1 import ValidationError From 628574b9c28b13c278d1b26f0cb1d1f6a3e625c1 Mon Sep 17 00:00:00 2001 From: Bagatur <22008038+baskaryan@users.noreply.github.com> Date: Wed, 21 Aug 2024 09:26:51 -0700 Subject: [PATCH 40/80] core[patch]: Release 0.2.34 (#25622) --- libs/core/pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libs/core/pyproject.toml b/libs/core/pyproject.toml index b2e19e7563f72..c420a5265a2a0 100644 --- a/libs/core/pyproject.toml +++ b/libs/core/pyproject.toml @@ -4,7 +4,7 @@ build-backend = 
"poetry.core.masonry.api" [tool.poetry] name = "langchain-core" -version = "0.2.33" +version = "0.2.34" description = "Building applications with LLMs through composability" authors = [] license = "MIT" From 71c2ec678226d182653ddc44c3b5c9e5eb987a33 Mon Sep 17 00:00:00 2001 From: Bagatur <22008038+baskaryan@users.noreply.github.com> Date: Wed, 21 Aug 2024 09:44:25 -0700 Subject: [PATCH 41/80] docs: langsmith few shot prereq (#25623) --- docs/docs/how_to/example_selectors_langsmith.ipynb | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/docs/how_to/example_selectors_langsmith.ipynb b/docs/docs/how_to/example_selectors_langsmith.ipynb index d8e7a90546594..e08af1b988799 100644 --- a/docs/docs/how_to/example_selectors_langsmith.ipynb +++ b/docs/docs/how_to/example_selectors_langsmith.ipynb @@ -19,6 +19,7 @@ "\n", "\n", "\n", "\n", @@ -72,7 +73,7 @@ "metadata": {}, "outputs": [], "source": [ - "%pip install -qU langsmith>=0.1.100 langchain langchain-openai langchain-benchmarks" + "%pip install -qU \"langsmith>=0.1.100\" \"langchain-core>=0.2.34\" langchain langchain-openai langchain-benchmarks" ] }, { From 4feda41ab6ef36eb111edc0dd96841d5a8018f96 Mon Sep 17 00:00:00 2001 From: Bagatur <22008038+baskaryan@users.noreply.github.com> Date: Wed, 21 Aug 2024 10:18:08 -0700 Subject: [PATCH 42/80] docs: ls how to link (#25624) --- docs/docs/how_to/example_selectors_langsmith.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/docs/how_to/example_selectors_langsmith.ipynb b/docs/docs/how_to/example_selectors_langsmith.ipynb index e08af1b988799..2e7ab9a5bcbc0 100644 --- a/docs/docs/how_to/example_selectors_langsmith.ipynb +++ b/docs/docs/how_to/example_selectors_langsmith.ipynb @@ -321,7 +321,7 @@ "id": "94489b4a", "metadata": {}, "source": [ - "Looking at the LangSmith trace, we can see that relevant examples were pulled in in the `similar_examples` step and passed as messages to ChatOpenAI: https://smith.langchain.com/public/9585e30f-765a-4ed9-b964-2211420cd2f8/r." + "Looking at the LangSmith trace, we can see that relevant examples were pulled in in the `similar_examples` step and passed as messages to ChatOpenAI: https://smith.langchain.com/public/9585e30f-765a-4ed9-b964-2211420cd2f8/r/fdea98d6-e90f-49d4-ac22-dfd012e9e0d9." 
] } ], From 39c44817ae24525cb0a6021632bd98c07a1d114a Mon Sep 17 00:00:00 2001 From: Bagatur <22008038+baskaryan@users.noreply.github.com> Date: Wed, 21 Aug 2024 11:24:06 -0700 Subject: [PATCH 43/80] infra: test convert_message (#25632) --- .../tests/unit_tests/messages/test_utils.py | 175 ++++++++++++++++++ 1 file changed, 175 insertions(+) diff --git a/libs/core/tests/unit_tests/messages/test_utils.py b/libs/core/tests/unit_tests/messages/test_utils.py index 142272292f609..3f25e02fb2381 100644 --- a/libs/core/tests/unit_tests/messages/test_utils.py +++ b/libs/core/tests/unit_tests/messages/test_utils.py @@ -1,3 +1,4 @@ +import json from typing import Dict, List, Type import pytest @@ -12,6 +13,7 @@ ToolMessage, ) from langchain_core.messages.utils import ( + convert_to_messages, filter_messages, merge_message_runs, trim_messages, @@ -357,3 +359,176 @@ def dummy_token_counter(messages: List[BaseMessage]) -> int: class FakeTokenCountingModel(FakeChatModel): def get_num_tokens_from_messages(self, messages: List[BaseMessage]) -> int: return dummy_token_counter(messages) + + +def test_convert_to_messages() -> None: + message_like: List = [ + # BaseMessage + SystemMessage("1"), + HumanMessage([{"type": "image_url", "image_url": {"url": "2.1"}}], name="2.2"), + AIMessage( + [ + {"type": "text", "text": "3.1"}, + { + "type": "tool_use", + "id": "3.2", + "name": "3.3", + "input": {"3.4": "3.5"}, + }, + ] + ), + AIMessage( + [ + {"type": "text", "text": "4.1"}, + { + "type": "tool_use", + "id": "4.2", + "name": "4.3", + "input": {"4.4": "4.5"}, + }, + ], + tool_calls=[ + { + "name": "4.3", + "args": {"4.4": "4.5"}, + "id": "4.2", + "type": "tool_call", + } + ], + ), + ToolMessage("5.1", tool_call_id="5.2", name="5.3"), + # OpenAI dict + {"role": "system", "content": "6"}, + { + "role": "user", + "content": [{"type": "image_url", "image_url": {"url": "7.1"}}], + "name": "7.2", + }, + { + "role": "assistant", + "content": [{"type": "text", "text": "8.1"}], + "tool_calls": [ + { + "type": "function", + "function": { + "arguments": json.dumps({"8.2": "8.3"}), + "name": "8.4", + }, + "id": "8.5", + } + ], + "name": "8.6", + }, + {"role": "tool", "content": "10.1", "tool_call_id": "10.2"}, + # Tuple/List + ("system", "11.1"), + ("human", [{"type": "image_url", "image_url": {"url": "12.1"}}]), + ( + "ai", + [ + {"type": "text", "text": "13.1"}, + { + "type": "tool_use", + "id": "13.2", + "name": "13.3", + "input": {"13.4": "13.5"}, + }, + ], + ), + # String + "14.1", + # LangChain dict + { + "role": "ai", + "content": [{"type": "text", "text": "15.1"}], + "tool_calls": [{"args": {"15.2": "15.3"}, "name": "15.4", "id": "15.5"}], + "name": "15.6", + }, + ] + expected = [ + SystemMessage(content="1"), + HumanMessage( + content=[{"type": "image_url", "image_url": {"url": "2.1"}}], name="2.2" + ), + AIMessage( + content=[ + {"type": "text", "text": "3.1"}, + { + "type": "tool_use", + "id": "3.2", + "name": "3.3", + "input": {"3.4": "3.5"}, + }, + ] + ), + AIMessage( + content=[ + {"type": "text", "text": "4.1"}, + { + "type": "tool_use", + "id": "4.2", + "name": "4.3", + "input": {"4.4": "4.5"}, + }, + ], + tool_calls=[ + { + "name": "4.3", + "args": {"4.4": "4.5"}, + "id": "4.2", + "type": "tool_call", + } + ], + ), + ToolMessage(content="5.1", name="5.3", tool_call_id="5.2"), + SystemMessage(content="6"), + HumanMessage( + content=[{"type": "image_url", "image_url": {"url": "7.1"}}], name="7.2" + ), + AIMessage( + content=[{"type": "text", "text": "8.1"}], + name="8.6", + tool_calls=[ + { + "name": "8.4", + 
"args": {"8.2": "8.3"}, + "id": "8.5", + "type": "tool_call", + } + ], + ), + ToolMessage(content="10.1", tool_call_id="10.2"), + SystemMessage(content="11.1"), + HumanMessage(content=[{"type": "image_url", "image_url": {"url": "12.1"}}]), + AIMessage( + content=[ + {"type": "text", "text": "13.1"}, + { + "type": "tool_use", + "id": "13.2", + "name": "13.3", + "input": {"13.4": "13.5"}, + }, + ] + ), + HumanMessage(content="14.1"), + AIMessage( + content=[{"type": "text", "text": "15.1"}], + name="15.6", + tool_calls=[ + { + "name": "15.4", + "args": {"15.2": "15.3"}, + "id": "15.5", + "type": "tool_call", + } + ], + ), + ] + actual = convert_to_messages(message_like) + assert expected == actual + + +@pytest.mark.xfail(reason="AI message does not support refusal key yet.") +def test_convert_to_messages_openai_refusal() -> None: + convert_to_messages([{"role": "assistant", "refusal": "9.1"}]) From b71ae52e6552c6d844da780803c34abf7ec77400 Mon Sep 17 00:00:00 2001 From: Christophe Bornet Date: Wed, 21 Aug 2024 21:25:24 +0200 Subject: [PATCH 44/80] [unstructured][security] Bump unstructured version (#25364) This ensures version 0.15.7+ is pulled. This version of unstructured uses a version of NLTK >= 3.8.2 that has a fix for a critical CVE: https://github.com/advisories/GHSA-cgvx-9447-vcch --- libs/partners/unstructured/poetry.lock | 68 ++++++++++------------- libs/partners/unstructured/pyproject.toml | 4 +- 2 files changed, 31 insertions(+), 41 deletions(-) diff --git a/libs/partners/unstructured/poetry.lock b/libs/partners/unstructured/poetry.lock index a2872562b4bd5..7a3892dd282df 100644 --- a/libs/partners/unstructured/poetry.lock +++ b/libs/partners/unstructured/poetry.lock @@ -1231,7 +1231,7 @@ files = [ [[package]] name = "langchain-core" -version = "0.2.23" +version = "0.2.30" description = "Building applications with LLMs through composability" optional = false python-versions = ">=3.8.1,<4.0" @@ -1248,6 +1248,7 @@ pydantic = [ ] PyYAML = ">=5.3" tenacity = "^8.1.0,!=8.4.0" +typing-extensions = ">=4.7" [package.source] type = "directory" @@ -1397,9 +1398,13 @@ files = [ {file = "lxml-5.2.2-cp36-cp36m-win_amd64.whl", hash = "sha256:edcfa83e03370032a489430215c1e7783128808fd3e2e0a3225deee278585196"}, {file = "lxml-5.2.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:28bf95177400066596cdbcfc933312493799382879da504633d16cf60bba735b"}, {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a745cc98d504d5bd2c19b10c79c61c7c3df9222629f1b6210c0368177589fb8"}, + {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b590b39ef90c6b22ec0be925b211298e810b4856909c8ca60d27ffbca6c12e6"}, {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b336b0416828022bfd5a2e3083e7f5ba54b96242159f83c7e3eebaec752f1716"}, + {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:c2faf60c583af0d135e853c86ac2735ce178f0e338a3c7f9ae8f622fd2eb788c"}, {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:4bc6cb140a7a0ad1f7bc37e018d0ed690b7b6520ade518285dc3171f7a117905"}, + {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7ff762670cada8e05b32bf1e4dc50b140790909caa8303cfddc4d702b71ea184"}, {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:57f0a0bbc9868e10ebe874e9f129d2917750adf008fe7b9c1598c0fbbfdde6a6"}, + {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = 
"sha256:a6d2092797b388342c1bc932077ad232f914351932353e2e8706851c870bca1f"}, {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:60499fe961b21264e17a471ec296dcbf4365fbea611bf9e303ab69db7159ce61"}, {file = "lxml-5.2.2-cp37-cp37m-win32.whl", hash = "sha256:d9b342c76003c6b9336a80efcc766748a333573abf9350f4094ee46b006ec18f"}, {file = "lxml-5.2.2-cp37-cp37m-win_amd64.whl", hash = "sha256:b16db2770517b8799c79aa80f4053cd6f8b716f21f8aca962725a9565ce3ee40"}, @@ -1931,6 +1936,7 @@ description = "Nvidia JIT LTO Library" optional = false python-versions = ">=3" files = [ + {file = "nvidia_nvjitlink_cu12-12.5.82-py3-none-manylinux2014_aarch64.whl", hash = "sha256:98103729cc5226e13ca319a10bbf9433bbbd44ef64fe72f45f067cacc14b8d27"}, {file = "nvidia_nvjitlink_cu12-12.5.82-py3-none-manylinux2014_x86_64.whl", hash = "sha256:f9b37bc5c8cf7509665cb6ada5aaa0ce65618f2332b7d3e78e9790511f111212"}, {file = "nvidia_nvjitlink_cu12-12.5.82-py3-none-win_amd64.whl", hash = "sha256:e782564d705ff0bf61ac3e1bf730166da66dd2fe9012f111ede5fc49b64ae697"}, ] @@ -2911,21 +2917,6 @@ files = [ {file = "pyreadline3-3.4.1.tar.gz", hash = "sha256:6f3d1f7b8a31ba32b73917cefc1f28cc660562f39aea8646d30bd6eff21f7bae"}, ] -[[package]] -name = "pytesseract" -version = "0.3.10" -description = "Python-tesseract is a python wrapper for Google's Tesseract-OCR" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pytesseract-0.3.10-py3-none-any.whl", hash = "sha256:8f22cc98f765bf13517ead0c70effedb46c153540d25783e04014f28b55a5fc6"}, - {file = "pytesseract-0.3.10.tar.gz", hash = "sha256:f1c3a8b0f07fd01a1085d451f5b8315be6eec1d5577a6796d46dc7a62bd4120f"}, -] - -[package.dependencies] -packaging = ">=21.3" -Pillow = ">=8.0.0" - [[package]] name = "pytest" version = "7.4.4" @@ -3066,18 +3057,19 @@ typing-extensions = ">=4.9.0" [[package]] name = "python-pptx" -version = "0.6.23" -description = "Generate and manipulate Open XML PowerPoint (.pptx) files" +version = "1.0.2" +description = "Create, read, and update PowerPoint 2007+ (.pptx) files." 
optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "python-pptx-0.6.23.tar.gz", hash = "sha256:587497ff28e779ab18dbb074f6d4052893c85dedc95ed75df319364f331fedee"}, - {file = "python_pptx-0.6.23-py3-none-any.whl", hash = "sha256:dd0527194627a2b7cc05f3ba23ecaa2d9a0d5ac9b6193a28ed1b7a716f4217d4"}, + {file = "python_pptx-1.0.2-py3-none-any.whl", hash = "sha256:160838e0b8565a8b1f67947675886e9fea18aa5e795db7ae531606d68e785cba"}, + {file = "python_pptx-1.0.2.tar.gz", hash = "sha256:479a8af0eaf0f0d76b6f00b0887732874ad2e3188230315290cd1f9dd9cc7095"}, ] [package.dependencies] lxml = ">=3.1.0" Pillow = ">=3.3.2" +typing-extensions = ">=4.9.0" XlsxWriter = ">=0.5.7" [[package]] @@ -3139,7 +3131,6 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -4081,13 +4072,13 @@ files = [ [[package]] name = "unstructured" -version = "0.15.0" +version = "0.15.7" description = "A library that prepares raw documents for downstream ML tasks." 
optional = false python-versions = "<3.13,>=3.9.0" files = [ - {file = "unstructured-0.15.0-py3-none-any.whl", hash = "sha256:43538e0463aec3741eef06eaf69fc27f6fdaffa7376816c797cb25abb9b55b45"}, - {file = "unstructured-0.15.0.tar.gz", hash = "sha256:b75deb8e46dffbe1c6f82936810a31ab9516923c3a60bd01821d7701fa13b628"}, + {file = "unstructured-0.15.7-py3-none-any.whl", hash = "sha256:9b176f18776142feed1f058f11d16046ae24d077fa96648979ae9c474819f56c"}, + {file = "unstructured-0.15.7.tar.gz", hash = "sha256:ac55bf31b1d4c19c33c0e2ec5f615d96d03a2bf49a784f23b29d5530b90d6830"}, ] [package.dependencies] @@ -4115,12 +4106,11 @@ pillow-heif = {version = "*", optional = true, markers = "extra == \"all-docs\"" psutil = "*" pypandoc = {version = "*", optional = true, markers = "extra == \"all-docs\""} pypdf = {version = "*", optional = true, markers = "extra == \"all-docs\""} -pytesseract = {version = "*", optional = true, markers = "extra == \"all-docs\""} python-docx = {version = ">=1.1.2", optional = true, markers = "extra == \"all-docs\""} python-iso639 = "*" python-magic = "*" python-oxmsg = {version = "*", optional = true, markers = "extra == \"all-docs\""} -python-pptx = {version = "<=0.6.23", optional = true, markers = "extra == \"all-docs\""} +python-pptx = {version = ">=1.0.1", optional = true, markers = "extra == \"all-docs\""} rapidfuzz = "*" requests = "*" tabulate = "*" @@ -4134,14 +4124,14 @@ xlrd = {version = "*", optional = true, markers = "extra == \"all-docs\""} [package.extras] airtable = ["pyairtable"] -all-docs = ["effdet", "google-cloud-vision", "markdown", "networkx", "onnx", "openpyxl", "pandas", "pdf2image", "pdfminer.six", "pikepdf", "pillow-heif", "pypandoc", "pypdf", "pytesseract", "python-docx (>=1.1.2)", "python-oxmsg", "python-pptx (<=0.6.23)", "unstructured-inference (==0.7.36)", "unstructured.pytesseract (>=0.3.12)", "xlrd"] -astra = ["astrapy"] +all-docs = ["effdet", "google-cloud-vision", "markdown", "networkx", "onnx", "openpyxl", "pandas", "pdf2image", "pdfminer.six", "pikepdf", "pillow-heif", "pypandoc", "pypdf", "python-docx (>=1.1.2)", "python-oxmsg", "python-pptx (>=1.0.1)", "unstructured-inference (==0.7.36)", "unstructured.pytesseract (>=0.3.12)", "xlrd"] +astradb = ["astrapy"] azure = ["adlfs", "fsspec"] azure-cognitive-search = ["azure-search-documents"] bedrock = ["boto3", "langchain-community"] biomed = ["bs4"] box = ["boxfs", "fsspec"] -chroma = ["chromadb", "importlib-metadata (>=7.1.0)", "typer (<=0.9.0)"] +chroma = ["chromadb", "importlib-metadata (>=8.2.0)", "tenacity (==8.5.0)", "typer (<=0.9.0)"] clarifai = ["clarifai"] confluence = ["atlassian-python-api"] csv = ["pandas"] @@ -4152,7 +4142,7 @@ doc = ["python-docx (>=1.1.2)"] docx = ["python-docx (>=1.1.2)"] dropbox = ["dropboxdrivefs", "fsspec"] elasticsearch = ["elasticsearch[async]"] -embed-huggingface = ["huggingface", "langchain-community", "sentence-transformers"] +embed-huggingface = ["langchain-huggingface"] embed-octoai = ["openai", "tiktoken"] embed-vertexai = ["langchain", "langchain-community", "langchain-google-vertexai"] embed-voyageai = ["langchain", "langchain-voyageai"] @@ -4163,26 +4153,26 @@ gitlab = ["python-gitlab"] google-drive = ["google-api-python-client"] hubspot = ["hubspot-api-client", "urllib3"] huggingface = ["langdetect", "sacremoses", "sentencepiece", "torch", "transformers"] -image = ["effdet", "google-cloud-vision", "onnx", "pdf2image", "pdfminer.six", "pikepdf", "pillow-heif", "pypdf", "pytesseract", "unstructured-inference (==0.7.36)", "unstructured.pytesseract 
(>=0.3.12)"] +image = ["effdet", "google-cloud-vision", "onnx", "pdf2image", "pdfminer.six", "pikepdf", "pillow-heif", "pypdf", "unstructured-inference (==0.7.36)", "unstructured.pytesseract (>=0.3.12)"] jira = ["atlassian-python-api"] kafka = ["confluent-kafka"] -local-inference = ["effdet", "google-cloud-vision", "markdown", "networkx", "onnx", "openpyxl", "pandas", "pdf2image", "pdfminer.six", "pikepdf", "pillow-heif", "pypandoc", "pypdf", "pytesseract", "python-docx (>=1.1.2)", "python-oxmsg", "python-pptx (<=0.6.23)", "unstructured-inference (==0.7.36)", "unstructured.pytesseract (>=0.3.12)", "xlrd"] +local-inference = ["effdet", "google-cloud-vision", "markdown", "networkx", "onnx", "openpyxl", "pandas", "pdf2image", "pdfminer.six", "pikepdf", "pillow-heif", "pypandoc", "pypdf", "python-docx (>=1.1.2)", "python-oxmsg", "python-pptx (>=1.0.1)", "unstructured-inference (==0.7.36)", "unstructured.pytesseract (>=0.3.12)", "xlrd"] md = ["markdown"] mongodb = ["pymongo"] msg = ["python-oxmsg"] notion = ["htmlBuilder", "notion-client"] odt = ["pypandoc", "python-docx (>=1.1.2)"] onedrive = ["Office365-REST-Python-Client", "bs4", "msal"] -openai = ["langchain-community", "openai", "tiktoken"] +openai = ["langchain-openai"] opensearch = ["opensearch-py"] org = ["pypandoc"] outlook = ["Office365-REST-Python-Client", "msal"] -paddleocr = ["unstructured.paddleocr (==2.8.0.1)"] -pdf = ["effdet", "google-cloud-vision", "onnx", "pdf2image", "pdfminer.six", "pikepdf", "pillow-heif", "pypdf", "pytesseract", "unstructured-inference (==0.7.36)", "unstructured.pytesseract (>=0.3.12)"] +paddleocr = ["paddlepaddle (==3.0.0b1)", "unstructured.paddleocr (==2.8.0.1)"] +pdf = ["effdet", "google-cloud-vision", "onnx", "pdf2image", "pdfminer.six", "pikepdf", "pillow-heif", "pypdf", "unstructured-inference (==0.7.36)", "unstructured.pytesseract (>=0.3.12)"] pinecone = ["pinecone-client (>=3.7.1)"] postgres = ["psycopg2-binary"] -ppt = ["python-pptx (<=0.6.23)"] -pptx = ["python-pptx (<=0.6.23)"] +ppt = ["python-pptx (>=1.0.1)"] +pptx = ["python-pptx (>=1.0.1)"] qdrant = ["qdrant-client"] reddit = ["praw"] rst = ["pypandoc"] @@ -4416,4 +4406,4 @@ local = ["unstructured"] [metadata] lock-version = "2.0" python-versions = ">=3.9,<4.0" -content-hash = "250df0f667fbb8eafc622dfe771541a9a25e718cc786f6adb0c13081c4b77745" +content-hash = "d95a01d052e3f6175a45c5a589692274300a88782938ed71f835c5f68842d821" diff --git a/libs/partners/unstructured/pyproject.toml b/libs/partners/unstructured/pyproject.toml index 2e8c42a23b312..38f360549c2df 100644 --- a/libs/partners/unstructured/pyproject.toml +++ b/libs/partners/unstructured/pyproject.toml @@ -15,7 +15,7 @@ license = "MIT" python = ">=3.9,<4.0" langchain-core = "^0.2.23" unstructured-client = { version = "^0.24.1" } -unstructured = { version = "^0.15.0", optional = true, python = "<3.13", extras = [ +unstructured = { version = "^0.15.7", optional = true, python = "<3.13", extras = [ "all-docs", ] } @@ -50,7 +50,7 @@ ruff = "^0.1.8" [tool.poetry.group.typing.dependencies] mypy = "^1.7.1" -unstructured = { version = "^0.15.0", python = "<3.13", extras = ["all-docs"] } +unstructured = { version = "^0.15.7", python = "<3.13", extras = ["all-docs"] } langchain-core = { path = "../../core", develop = true } [tool.poetry.group.dev] From f4b3c90886bbed58694ccfa2f56db9caddea3417 Mon Sep 17 00:00:00 2001 From: Bagatur <22008038+baskaryan@users.noreply.github.com> Date: Wed, 21 Aug 2024 12:38:53 -0700 Subject: [PATCH 45/80] docs: add prereq commas (#25626) --- 
docs/src/theme/Compatibility.js | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/docs/src/theme/Compatibility.js b/docs/src/theme/Compatibility.js index dbeadc617d3cb..0d33715e8bd70 100644 --- a/docs/src/theme/Compatibility.js +++ b/docs/src/theme/Compatibility.js @@ -8,11 +8,14 @@ export default function Compatibility({ packagesAndVersions }) { The code in this guide requires{" "} {packagesAndVersions.map(([pkg, version], i) => { return ( - =${version}-${i}`}>{`${pkg}>=${version}`} + =${version}-${i}`}> + {`${pkg}>=${version}`} + {i < packagesAndVersions.length - 1 && ", "} + ); })}. Please ensure you have the correct packages installed. ); -} +} \ No newline at end of file From c8be0a9f7071f9143636a1c29e0582ccb1dc5039 Mon Sep 17 00:00:00 2001 From: Erick Friis Date: Wed, 21 Aug 2024 12:53:55 -0700 Subject: [PATCH 46/80] partners/unstructured: release 0.1.2 (#25637) --- libs/partners/unstructured/poetry.lock | 1736 +++++++++++---------- libs/partners/unstructured/pyproject.toml | 2 +- 2 files changed, 902 insertions(+), 836 deletions(-) diff --git a/libs/partners/unstructured/poetry.lock b/libs/partners/unstructured/poetry.lock index 7a3892dd282df..7655416222147 100644 --- a/libs/partners/unstructured/poetry.lock +++ b/libs/partners/unstructured/poetry.lock @@ -77,13 +77,13 @@ lxml = ["lxml"] [[package]] name = "cachetools" -version = "5.4.0" +version = "5.5.0" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.4.0-py3-none-any.whl", hash = "sha256:3ae3b49a3d5e28a77a0be2b37dbcb89005058959cb2323858c2657c4a8cab474"}, - {file = "cachetools-5.4.0.tar.gz", hash = "sha256:b8adc2e7c07f105ced7bc56dbb6dfbe7c4a00acce20e2227b3f355be89bc6827"}, + {file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"}, + {file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"}, ] [[package]] @@ -99,63 +99,78 @@ files = [ [[package]] name = "cffi" -version = "1.16.0" +version = "1.17.0" description = "Foreign Function Interface for Python calling C code." 
optional = false python-versions = ">=3.8" files = [ - {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, - {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, - {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, - {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, - {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, - {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, - {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, - {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, - {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, - {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, - {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, - {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, - {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, - {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, - {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, + {file = "cffi-1.17.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f9338cc05451f1942d0d8203ec2c346c830f8e86469903d5126c1f0a13a2bcbb"}, + {file = "cffi-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0ce71725cacc9ebf839630772b07eeec220cbb5f03be1399e0457a1464f8e1a"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c815270206f983309915a6844fe994b2fa47e5d05c4c4cef267c3b30e34dbe42"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6bdcd415ba87846fd317bee0774e412e8792832e7805938987e4ede1d13046d"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a98748ed1a1df4ee1d6f927e151ed6c1a09d5ec21684de879c7ea6aa96f58f2"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a048d4f6630113e54bb4b77e315e1ba32a5a31512c31a273807d0027a7e69ab"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24aa705a5f5bd3a8bcfa4d123f03413de5d86e497435693b638cbffb7d5d8a1b"}, + {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:856bf0924d24e7f93b8aee12a3a1095c34085600aa805693fb7f5d1962393206"}, + {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:4304d4416ff032ed50ad6bb87416d802e67139e31c0bde4628f36a47a3164bfa"}, + {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:331ad15c39c9fe9186ceaf87203a9ecf5ae0ba2538c9e898e3a6967e8ad3db6f"}, + {file = "cffi-1.17.0-cp310-cp310-win32.whl", hash = "sha256:669b29a9eca6146465cc574659058ed949748f0809a2582d1f1a324eb91054dc"}, + {file = "cffi-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:48b389b1fd5144603d61d752afd7167dfd205973a43151ae5045b35793232aa2"}, + {file = "cffi-1.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c5d97162c196ce54af6700949ddf9409e9833ef1003b4741c2b39ef46f1d9720"}, + {file = "cffi-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ba5c243f4004c750836f81606a9fcb7841f8874ad8f3bf204ff5e56332b72b9"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb9333f58fc3a2296fb1d54576138d4cf5d496a2cc118422bd77835e6ae0b9cb"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:435a22d00ec7d7ea533db494da8581b05977f9c37338c80bc86314bec2619424"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1df34588123fcc88c872f5acb6f74ae59e9d182a2707097f9e28275ec26a12d"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df8bb0010fdd0a743b7542589223a2816bdde4d94bb5ad67884348fa2c1c67e8"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8b5b9712783415695663bd463990e2f00c6750562e6ad1d28e072a611c5f2a6"}, + {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ffef8fd58a36fb5f1196919638f73dd3ae0db1a878982b27a9a5a176ede4ba91"}, + {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e67d26532bfd8b7f7c05d5a766d6f437b362c1bf203a3a5ce3593a645e870b8"}, + {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:45f7cd36186db767d803b1473b3c659d57a23b5fa491ad83c6d40f2af58e4dbb"}, + {file = "cffi-1.17.0-cp311-cp311-win32.whl", hash = "sha256:a9015f5b8af1bb6837a3fcb0cdf3b874fe3385ff6274e8b7925d81ccaec3c5c9"}, + {file = "cffi-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:b50aaac7d05c2c26dfd50c3321199f019ba76bb650e346a6ef3616306eed67b0"}, + {file = "cffi-1.17.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aec510255ce690d240f7cb23d7114f6b351c733a74c279a84def763660a2c3bc"}, + {file = "cffi-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2770bb0d5e3cc0e31e7318db06efcbcdb7b31bcb1a70086d3177692a02256f59"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db9a30ec064129d605d0f1aedc93e00894b9334ec74ba9c6bdd08147434b33eb"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a47eef975d2b8b721775a0fa286f50eab535b9d56c70a6e62842134cf7841195"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f3e0992f23bbb0be00a921eae5363329253c3b86287db27092461c887b791e5e"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6107e445faf057c118d5050560695e46d272e5301feffda3c41849641222a828"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb862356ee9391dc5a0b3cbc00f416b48c1b9a52d252d898e5b7696a5f9fe150"}, + {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c1c13185b90bbd3f8b5963cd8ce7ad4ff441924c31e23c975cb150e27c2bf67a"}, + {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17c6d6d3260c7f2d94f657e6872591fe8733872a86ed1345bda872cfc8c74885"}, + {file = "cffi-1.17.0-cp312-cp312-win32.whl", hash = "sha256:c3b8bd3133cd50f6b637bb4322822c94c5ce4bf0d724ed5ae70afce62187c492"}, + {file = "cffi-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:dca802c8db0720ce1c49cce1149ff7b06e91ba15fa84b1d59144fef1a1bc7ac2"}, + {file = "cffi-1.17.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6ce01337d23884b21c03869d2f68c5523d43174d4fc405490eb0091057943118"}, + {file = "cffi-1.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cab2eba3830bf4f6d91e2d6718e0e1c14a2f5ad1af68a89d24ace0c6b17cced7"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:14b9cbc8f7ac98a739558eb86fabc283d4d564dafed50216e7f7ee62d0d25377"}, + {file = 
"cffi-1.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b00e7bcd71caa0282cbe3c90966f738e2db91e64092a877c3ff7f19a1628fdcb"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:41f4915e09218744d8bae14759f983e466ab69b178de38066f7579892ff2a555"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4760a68cab57bfaa628938e9c2971137e05ce48e762a9cb53b76c9b569f1204"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:011aff3524d578a9412c8b3cfaa50f2c0bd78e03eb7af7aa5e0df59b158efb2f"}, + {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:a003ac9edc22d99ae1286b0875c460351f4e101f8c9d9d2576e78d7e048f64e0"}, + {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ef9528915df81b8f4c7612b19b8628214c65c9b7f74db2e34a646a0a2a0da2d4"}, + {file = "cffi-1.17.0-cp313-cp313-win32.whl", hash = "sha256:70d2aa9fb00cf52034feac4b913181a6e10356019b18ef89bc7c12a283bf5f5a"}, + {file = "cffi-1.17.0-cp313-cp313-win_amd64.whl", hash = "sha256:b7b6ea9e36d32582cda3465f54c4b454f62f23cb083ebc7a94e2ca6ef011c3a7"}, + {file = "cffi-1.17.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:964823b2fc77b55355999ade496c54dde161c621cb1f6eac61dc30ed1b63cd4c"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:516a405f174fd3b88829eabfe4bb296ac602d6a0f68e0d64d5ac9456194a5b7e"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dec6b307ce928e8e112a6bb9921a1cb00a0e14979bf28b98e084a4b8a742bd9b"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4094c7b464cf0a858e75cd14b03509e84789abf7b79f8537e6a72152109c76e"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2404f3de742f47cb62d023f0ba7c5a916c9c653d5b368cc966382ae4e57da401"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3aa9d43b02a0c681f0bfbc12d476d47b2b2b6a3f9287f11ee42989a268a1833c"}, + {file = "cffi-1.17.0-cp38-cp38-win32.whl", hash = "sha256:0bb15e7acf8ab35ca8b24b90af52c8b391690ef5c4aec3d31f38f0d37d2cc499"}, + {file = "cffi-1.17.0-cp38-cp38-win_amd64.whl", hash = "sha256:93a7350f6706b31f457c1457d3a3259ff9071a66f312ae64dc024f049055f72c"}, + {file = "cffi-1.17.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1a2ddbac59dc3716bc79f27906c010406155031a1c801410f1bafff17ea304d2"}, + {file = "cffi-1.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6327b572f5770293fc062a7ec04160e89741e8552bf1c358d1a23eba68166759"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbc183e7bef690c9abe5ea67b7b60fdbca81aa8da43468287dae7b5c046107d4"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bdc0f1f610d067c70aa3737ed06e2726fd9d6f7bfee4a351f4c40b6831f4e82"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6d872186c1617d143969defeadac5a904e6e374183e07977eedef9c07c8953bf"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d46ee4764b88b91f16661a8befc6bfb24806d885e27436fdc292ed7e6f6d058"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6f76a90c345796c01d85e6332e81cab6d70de83b829cf1d9762d0a3da59c7932"}, + {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0e60821d312f99d3e1569202518dddf10ae547e799d75aef3bca3a2d9e8ee693"}, + {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:eb09b82377233b902d4c3fbeeb7ad731cdab579c6c6fda1f763cd779139e47c3"}, + {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:24658baf6224d8f280e827f0a50c46ad819ec8ba380a42448e24459daf809cf4"}, + {file = "cffi-1.17.0-cp39-cp39-win32.whl", hash = "sha256:0fdacad9e0d9fc23e519efd5ea24a70348305e8d7d85ecbb1a5fa66dc834e7fb"}, + {file = "cffi-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:7cbc78dc018596315d4e7841c8c3a7ae31cc4d638c9b627f87d52e8abaaf2d29"}, + {file = "cffi-1.17.0.tar.gz", hash = "sha256:f3157624b7558b914cb039fd1af735e5e8049a87c817cc215109ad1c8779df76"}, ] [package.dependencies] @@ -742,13 +757,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-auth" -version = "2.32.0" +version = "2.34.0" description = "Google Authentication Library" optional = false python-versions = ">=3.7" files = [ - {file = "google_auth-2.32.0-py2.py3-none-any.whl", hash = "sha256:53326ea2ebec768070a94bee4e1b9194c9646ea0c2bd72422785bd0f9abfad7b"}, - {file = "google_auth-2.32.0.tar.gz", hash = "sha256:49315be72c55a6a37d62819e3573f6b416aca00721f7e3e31a008d928bf64022"}, + {file = "google_auth-2.34.0-py2.py3-none-any.whl", hash = "sha256:72fd4733b80b6d777dcde515628a9eb4a577339437012874ea286bca7261ee65"}, + {file = "google_auth-2.34.0.tar.gz", hash = "sha256:8eb87396435c19b20d32abd2f984e31c191a15284af72eb922f10e5bde9c04cc"}, ] [package.dependencies] @@ -758,20 +773,20 @@ rsa = ">=3.1.4,<5" [package.extras] aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] -enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] +enterprise-cert = ["cryptography", "pyopenssl"] pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] reauth = ["pyu2f (>=0.1.5)"] requests = ["requests (>=2.20.0,<3.0.0.dev0)"] [[package]] name = "google-cloud-vision" -version = "3.7.3" +version = "3.7.4" description = "Google Cloud Vision API client library" optional = false python-versions = ">=3.7" files = [ - {file = "google-cloud-vision-3.7.3.tar.gz", hash = "sha256:ee48c48e820c7e09932663986970fef456dc0f45c7af729d46f1f3dd97c1dfb6"}, - {file = "google_cloud_vision-3.7.3-py2.py3-none-any.whl", hash = "sha256:d989b7e91880f7bd11d65530c3140fc130bbe29513aebb80f3d3e107764265d1"}, + {file = "google_cloud_vision-3.7.4-py2.py3-none-any.whl", hash = "sha256:0b956480002545ab8f13d2b4b8f316e9332cdeb6f65f92c0a20d72e9e0df3ad6"}, + {file = "google_cloud_vision-3.7.4.tar.gz", hash = "sha256:80b67f0a2dc587a31d7482d3d2692a773f25fbd09468fd9de45d00b671aad999"}, ] [package.dependencies] @@ -799,76 +814,76 @@ grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] [[package]] name = "grpcio" -version = "1.65.1" +version = "1.65.5" description = "HTTP/2-based RPC framework" optional = false python-versions = ">=3.8" files = [ - {file = "grpcio-1.65.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:3dc5f928815b8972fb83b78d8db5039559f39e004ec93ebac316403fe031a062"}, - {file = "grpcio-1.65.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:8333ca46053c35484c9f2f7e8d8ec98c1383a8675a449163cea31a2076d93de8"}, - {file = "grpcio-1.65.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:7af64838b6e615fff0ec711960ed9b6ee83086edfa8c32670eafb736f169d719"}, - {file = 
"grpcio-1.65.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbb64b4166362d9326f7efbf75b1c72106c1aa87f13a8c8b56a1224fac152f5c"}, - {file = "grpcio-1.65.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8422dc13ad93ec8caa2612b5032a2b9cd6421c13ed87f54db4a3a2c93afaf77"}, - {file = "grpcio-1.65.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:4effc0562b6c65d4add6a873ca132e46ba5e5a46f07c93502c37a9ae7f043857"}, - {file = "grpcio-1.65.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a6c71575a2fedf259724981fd73a18906513d2f306169c46262a5bae956e6364"}, - {file = "grpcio-1.65.1-cp310-cp310-win32.whl", hash = "sha256:34966cf526ef0ea616e008d40d989463e3db157abb213b2f20c6ce0ae7928875"}, - {file = "grpcio-1.65.1-cp310-cp310-win_amd64.whl", hash = "sha256:ca931de5dd6d9eb94ff19a2c9434b23923bce6f767179fef04dfa991f282eaad"}, - {file = "grpcio-1.65.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:bbb46330cc643ecf10bd9bd4ca8e7419a14b6b9dedd05f671c90fb2c813c6037"}, - {file = "grpcio-1.65.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d827a6fb9215b961eb73459ad7977edb9e748b23e3407d21c845d1d8ef6597e5"}, - {file = "grpcio-1.65.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:6e71aed8835f8d9fbcb84babc93a9da95955d1685021cceb7089f4f1e717d719"}, - {file = "grpcio-1.65.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a1c84560b3b2d34695c9ba53ab0264e2802721c530678a8f0a227951f453462"}, - {file = "grpcio-1.65.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27adee2338d697e71143ed147fe286c05810965d5d30ec14dd09c22479bfe48a"}, - {file = "grpcio-1.65.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f62652ddcadc75d0e7aa629e96bb61658f85a993e748333715b4ab667192e4e8"}, - {file = "grpcio-1.65.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:71a05fd814700dd9cb7d9a507f2f6a1ef85866733ccaf557eedacec32d65e4c2"}, - {file = "grpcio-1.65.1-cp311-cp311-win32.whl", hash = "sha256:b590f1ad056294dfaeac0b7e1b71d3d5ace638d8dd1f1147ce4bd13458783ba8"}, - {file = "grpcio-1.65.1-cp311-cp311-win_amd64.whl", hash = "sha256:12e9bdf3b5fd48e5fbe5b3da382ad8f97c08b47969f3cca81dd9b36b86ed39e2"}, - {file = "grpcio-1.65.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:54cb822e177374b318b233e54b6856c692c24cdbd5a3ba5335f18a47396bac8f"}, - {file = "grpcio-1.65.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:aaf3c54419a28d45bd1681372029f40e5bfb58e5265e3882eaf21e4a5f81a119"}, - {file = "grpcio-1.65.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:557de35bdfbe8bafea0a003dbd0f4da6d89223ac6c4c7549d78e20f92ead95d9"}, - {file = "grpcio-1.65.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8bfd95ef3b097f0cc86ade54eafefa1c8ed623aa01a26fbbdcd1a3650494dd11"}, - {file = "grpcio-1.65.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e6a8f3d6c41e6b642870afe6cafbaf7b61c57317f9ec66d0efdaf19db992b90"}, - {file = "grpcio-1.65.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1faaf7355ceed07ceaef0b9dcefa4c98daf1dd8840ed75c2de128c3f4a4d859d"}, - {file = "grpcio-1.65.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:60f1f38eed830488ad2a1b11579ef0f345ff16fffdad1d24d9fbc97ba31804ff"}, - {file = "grpcio-1.65.1-cp312-cp312-win32.whl", hash = "sha256:e75acfa52daf5ea0712e8aa82f0003bba964de7ae22c26d208cbd7bc08500177"}, - {file = "grpcio-1.65.1-cp312-cp312-win_amd64.whl", hash = "sha256:ff5a84907e51924973aa05ed8759210d8cdae7ffcf9e44fd17646cf4a902df59"}, - {file 
= "grpcio-1.65.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:1fbd6331f18c3acd7e09d17fd840c096f56eaf0ef830fbd50af45ae9dc8dfd83"}, - {file = "grpcio-1.65.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:de5b6be29116e094c5ef9d9e4252e7eb143e3d5f6bd6d50a78075553ab4930b0"}, - {file = "grpcio-1.65.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:e4a3cdba62b2d6aeae6027ae65f350de6dc082b72e6215eccf82628e79efe9ba"}, - {file = "grpcio-1.65.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:941c4869aa229d88706b78187d60d66aca77fe5c32518b79e3c3e03fc26109a2"}, - {file = "grpcio-1.65.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f40cebe5edb518d78b8131e87cb83b3ee688984de38a232024b9b44e74ee53d3"}, - {file = "grpcio-1.65.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2ca684ba331fb249d8a1ce88db5394e70dbcd96e58d8c4b7e0d7b141a453dce9"}, - {file = "grpcio-1.65.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8558f0083ddaf5de64a59c790bffd7568e353914c0c551eae2955f54ee4b857f"}, - {file = "grpcio-1.65.1-cp38-cp38-win32.whl", hash = "sha256:8d8143a3e3966f85dce6c5cc45387ec36552174ba5712c5dc6fcc0898fb324c0"}, - {file = "grpcio-1.65.1-cp38-cp38-win_amd64.whl", hash = "sha256:76e81a86424d6ca1ce7c16b15bdd6a964a42b40544bf796a48da241fdaf61153"}, - {file = "grpcio-1.65.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:cb5175f45c980ff418998723ea1b3869cce3766d2ab4e4916fbd3cedbc9d0ed3"}, - {file = "grpcio-1.65.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b12c1aa7b95abe73b3e04e052c8b362655b41c7798da69f1eaf8d186c7d204df"}, - {file = "grpcio-1.65.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:3019fb50128b21a5e018d89569ffaaaa361680e1346c2f261bb84a91082eb3d3"}, - {file = "grpcio-1.65.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ae15275ed98ea267f64ee9ddedf8ecd5306a5b5bb87972a48bfe24af24153e8"}, - {file = "grpcio-1.65.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f096ffb881f37e8d4f958b63c74bfc400c7cebd7a944b027357cd2fb8d91a57"}, - {file = "grpcio-1.65.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2f56b5a68fdcf17a0a1d524bf177218c3c69b3947cb239ea222c6f1867c3ab68"}, - {file = "grpcio-1.65.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:941596d419b9736ab548aa0feb5bbba922f98872668847bf0720b42d1d227b9e"}, - {file = "grpcio-1.65.1-cp39-cp39-win32.whl", hash = "sha256:5fd7337a823b890215f07d429f4f193d24b80d62a5485cf88ee06648591a0c57"}, - {file = "grpcio-1.65.1-cp39-cp39-win_amd64.whl", hash = "sha256:1bceeec568372cbebf554eae1b436b06c2ff24cfaf04afade729fb9035408c6c"}, - {file = "grpcio-1.65.1.tar.gz", hash = "sha256:3c492301988cd720cd145d84e17318d45af342e29ef93141228f9cd73222368b"}, + {file = "grpcio-1.65.5-cp310-cp310-linux_armv7l.whl", hash = "sha256:b67d450f1e008fedcd81e097a3a400a711d8be1a8b20f852a7b8a73fead50fe3"}, + {file = "grpcio-1.65.5-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:a70a20eed87bba647a38bedd93b3ce7db64b3f0e8e0952315237f7f5ca97b02d"}, + {file = "grpcio-1.65.5-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:f79c87c114bf37adf408026b9e2e333fe9ff31dfc9648f6f80776c513145c813"}, + {file = "grpcio-1.65.5-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f17f9fa2d947dbfaca01b3ab2c62eefa8240131fdc67b924eb42ce6032e3e5c1"}, + {file = "grpcio-1.65.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32d60e18ff7c34fe3f6db3d35ad5c6dc99f5b43ff3982cb26fad4174462d10b1"}, + {file = 
"grpcio-1.65.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fe6505376f5b00bb008e4e1418152e3ad3d954b629da286c7913ff3cfc0ff740"}, + {file = "grpcio-1.65.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:33158e56c6378063923c417e9fbdb28660b6e0e2835af42e67f5a7793f587af7"}, + {file = "grpcio-1.65.5-cp310-cp310-win32.whl", hash = "sha256:1cbc208edb9acf1cc339396a1a36b83796939be52f34e591c90292045b579fbf"}, + {file = "grpcio-1.65.5-cp310-cp310-win_amd64.whl", hash = "sha256:bc74f3f745c37e2c5685c9d2a2d5a94de00f286963f5213f763ae137bf4f2358"}, + {file = "grpcio-1.65.5-cp311-cp311-linux_armv7l.whl", hash = "sha256:3207ae60d07e5282c134b6e02f9271a2cb523c6d7a346c6315211fe2bf8d61ed"}, + {file = "grpcio-1.65.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a2f80510f99f82d4eb825849c486df703f50652cea21c189eacc2b84f2bde764"}, + {file = "grpcio-1.65.5-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:a80e9a5e3f93c54f5eb82a3825ea1fc4965b2fa0026db2abfecb139a5c4ecdf1"}, + {file = "grpcio-1.65.5-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b2944390a496567de9e70418f3742b477d85d8ca065afa90432edc91b4bb8ad"}, + {file = "grpcio-1.65.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3655139d7be213c32c79ef6fb2367cae28e56ef68e39b1961c43214b457f257"}, + {file = "grpcio-1.65.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:05f02d68fc720e085f061b704ee653b181e6d5abfe315daef085719728d3d1fd"}, + {file = "grpcio-1.65.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1c4caafe71aef4dabf53274bbf4affd6df651e9f80beedd6b8e08ff438ed3260"}, + {file = "grpcio-1.65.5-cp311-cp311-win32.whl", hash = "sha256:84c901cdec16a092099f251ef3360d15e29ef59772150fa261d94573612539b5"}, + {file = "grpcio-1.65.5-cp311-cp311-win_amd64.whl", hash = "sha256:11f8b16121768c1cb99d7dcb84e01510e60e6a206bf9123e134118802486f035"}, + {file = "grpcio-1.65.5-cp312-cp312-linux_armv7l.whl", hash = "sha256:ee6ed64a27588a2c94e8fa84fe8f3b5c89427d4d69c37690903d428ec61ca7e4"}, + {file = "grpcio-1.65.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:76991b7a6fb98630a3328839755181ce7c1aa2b1842aa085fd4198f0e5198960"}, + {file = "grpcio-1.65.5-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:89c00a18801b1ed9cc441e29b521c354725d4af38c127981f2c950c796a09b6e"}, + {file = "grpcio-1.65.5-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:078038e150a897e5e402ed3d57f1d31ebf604cbed80f595bd281b5da40762a92"}, + {file = "grpcio-1.65.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c97962720489ef31b5ad8a916e22bc31bba3664e063fb9f6702dce056d4aa61b"}, + {file = "grpcio-1.65.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:b8270b15b99781461b244f5c81d5c2bc9696ab9189fb5ff86c841417fb3b39fe"}, + {file = "grpcio-1.65.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8e5c4c15ac3fe1eb68e46bc51e66ad29be887479f231f8237cf8416058bf0cc1"}, + {file = "grpcio-1.65.5-cp312-cp312-win32.whl", hash = "sha256:f5b5970341359341d0e4c789da7568264b2a89cd976c05ea476036852b5950cd"}, + {file = "grpcio-1.65.5-cp312-cp312-win_amd64.whl", hash = "sha256:238a625f391a1b9f5f069bdc5930f4fd71b74426bea52196fc7b83f51fa97d34"}, + {file = "grpcio-1.65.5-cp38-cp38-linux_armv7l.whl", hash = "sha256:6c4e62bcf297a1568f627f39576dbfc27f1e5338a691c6dd5dd6b3979da51d1c"}, + {file = "grpcio-1.65.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d7df567b67d16d4177835a68d3f767bbcbad04da9dfb52cbd19171f430c898bd"}, + {file = 
"grpcio-1.65.5-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:b7ca419f1462390851eec395b2089aad1e49546b52d4e2c972ceb76da69b10f8"}, + {file = "grpcio-1.65.5-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fa36dd8496d3af0d40165252a669fa4f6fd2db4b4026b9a9411cbf060b9d6a15"}, + {file = "grpcio-1.65.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a101696f9ece90a0829988ff72f1b1ea2358f3df035bdf6d675dd8b60c2c0894"}, + {file = "grpcio-1.65.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2a6d8169812932feac514b420daffae8ab8e36f90f3122b94ae767e633296b17"}, + {file = "grpcio-1.65.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:47d0aaaab82823f0aa6adea5184350b46e2252e13a42a942db84da5b733f2e05"}, + {file = "grpcio-1.65.5-cp38-cp38-win32.whl", hash = "sha256:85ae8f8517d5bcc21fb07dbf791e94ed84cc28f84c903cdc2bd7eaeb437c8f45"}, + {file = "grpcio-1.65.5-cp38-cp38-win_amd64.whl", hash = "sha256:770bd4bd721961f6dd8049bc27338564ba8739913f77c0f381a9815e465ff965"}, + {file = "grpcio-1.65.5-cp39-cp39-linux_armv7l.whl", hash = "sha256:ab5ec837d8cee8dbce9ef6386125f119b231e4333cc6b6d57b6c5c7c82a72331"}, + {file = "grpcio-1.65.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cabd706183ee08d8026a015af5819a0b3a8959bdc9d1f6fdacd1810f09200f2a"}, + {file = "grpcio-1.65.5-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:ec71fc5b39821ad7d80db7473c8f8c2910f3382f0ddadfbcfc2c6c437107eb67"}, + {file = "grpcio-1.65.5-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3a9e35bcb045e39d7cac30464c285389b9a816ac2067e4884ad2c02e709ef8e"}, + {file = "grpcio-1.65.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d750e9330eb14236ca11b78d0c494eed13d6a95eb55472298f0e547c165ee324"}, + {file = "grpcio-1.65.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2b91ce647b6307f25650872454a4d02a2801f26a475f90d0b91ed8110baae589"}, + {file = "grpcio-1.65.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8da58ff80bc4556cf29bc03f5fff1f03b8387d6aaa7b852af9eb65b2cf833be4"}, + {file = "grpcio-1.65.5-cp39-cp39-win32.whl", hash = "sha256:7a412959aa5f08c5ac04aa7b7c3c041f5e4298cadd4fcc2acff195b56d185ebc"}, + {file = "grpcio-1.65.5-cp39-cp39-win_amd64.whl", hash = "sha256:55714ea852396ec9568f45f487639945ab674de83c12bea19d5ddbc3ae41ada3"}, + {file = "grpcio-1.65.5.tar.gz", hash = "sha256:ec6f219fb5d677a522b0deaf43cea6697b16f338cb68d009e30930c4aa0d2209"}, ] [package.extras] -protobuf = ["grpcio-tools (>=1.65.1)"] +protobuf = ["grpcio-tools (>=1.65.5)"] [[package]] name = "grpcio-status" -version = "1.65.1" +version = "1.65.5" description = "Status proto mapping for gRPC" optional = false python-versions = ">=3.8" files = [ - {file = "grpcio_status-1.65.1-py3-none-any.whl", hash = "sha256:0ec2070f7dbcc2fe78a7b34233a2a00f8ced727d2f1dec1af422d628cf86b92c"}, - {file = "grpcio_status-1.65.1.tar.gz", hash = "sha256:740d68d4a1824e59063f394df05171886262d5367b82256d54aac8aa7c5c79bf"}, + {file = "grpcio_status-1.65.5-py3-none-any.whl", hash = "sha256:44a445ce55375545a913e005be36fbec7999a4cc320d7aecb7a4469d3d49366c"}, + {file = "grpcio_status-1.65.5.tar.gz", hash = "sha256:2c9fa3af32efd26f01837d44305dce106973bc5357b9a9fc8bbd87bb8bf833d1"}, ] [package.dependencies] googleapis-common-protos = ">=1.5.5" -grpcio = ">=1.65.1" +grpcio = ">=1.65.5" protobuf = ">=5.26.1,<6.0dev" [[package]] @@ -929,13 +944,13 @@ socks = ["socksio (==1.*)"] [[package]] name = "huggingface-hub" -version = "0.24.2" +version = "0.24.6" description = "Client library to download 
and publish models, datasets and other repos on the huggingface.co hub" optional = false python-versions = ">=3.8.0" files = [ - {file = "huggingface_hub-0.24.2-py3-none-any.whl", hash = "sha256:abdf3244d3a274c4b1fbc5c4a1ef700032b3f60ba93cc63e4f036fd082aa2805"}, - {file = "huggingface_hub-0.24.2.tar.gz", hash = "sha256:92be892405d2f6a7a8479016f9a5662354f202b2c6c1ff499609621aed1fae10"}, + {file = "huggingface_hub-0.24.6-py3-none-any.whl", hash = "sha256:a990f3232aa985fe749bc9474060cbad75e8b2f115f6665a9fda5b9c97818970"}, + {file = "huggingface_hub-0.24.6.tar.gz", hash = "sha256:cc2579e761d070713eaa9c323e3debe39d5b464ae3a7261c39a9195b27bb8000"}, ] [package.dependencies] @@ -988,13 +1003,13 @@ files = [ [[package]] name = "importlib-metadata" -version = "8.2.0" +version = "8.4.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-8.2.0-py3-none-any.whl", hash = "sha256:11901fa0c2f97919b288679932bb64febaeacf289d18ac84dd68cb2e74213369"}, - {file = "importlib_metadata-8.2.0.tar.gz", hash = "sha256:72e8d4399996132204f9a16dcc751af254a48f8d1b20b9ff0f98d4a8f901e73d"}, + {file = "importlib_metadata-8.4.0-py3-none-any.whl", hash = "sha256:66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1"}, + {file = "importlib_metadata-8.4.0.tar.gz", hash = "sha256:9a547d3bc3608b025f93d403fdd1aae741c24fbb8314df4b155675742ce303c5"}, ] [package.dependencies] @@ -1007,21 +1022,21 @@ test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "p [[package]] name = "importlib-resources" -version = "6.4.0" +version = "6.4.3" description = "Read resources from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_resources-6.4.0-py3-none-any.whl", hash = "sha256:50d10f043df931902d4194ea07ec57960f66a80449ff867bfe782b4c486ba78c"}, - {file = "importlib_resources-6.4.0.tar.gz", hash = "sha256:cdb2b453b8046ca4e3798eb1d84f3cce1446a0e8e7b5ef4efb600f19fc398145"}, + {file = "importlib_resources-6.4.3-py3-none-any.whl", hash = "sha256:2d6dfe3b9e055f72495c2085890837fc8c758984e209115c8792bddcb762cd93"}, + {file = "importlib_resources-6.4.3.tar.gz", hash = "sha256:4a202b9b9d38563b46da59221d77bb73862ab5d79d461307bcb826d725448b98"}, ] [package.dependencies] zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["jaraco.test (>=5.4)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +test = ["jaraco.test (>=5.4)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"] [[package]] name = "iniconfig" @@ -1231,7 +1246,7 @@ files = [ [[package]] name = "langchain-core" -version = "0.2.30" +version = "0.2.34" description = "Building applications with LLMs through composability" optional = false python-versions = ">=3.8.1,<4.0" @@ -1270,16 +1285,17 @@ six = "*" [[package]] name = "langsmith" -version = "0.1.93" +version = "0.1.101" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." 
optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langsmith-0.1.93-py3-none-any.whl", hash = "sha256:811210b9d5f108f36431bd7b997eb9476a9ecf5a2abd7ddbb606c1cdcf0f43ce"}, - {file = "langsmith-0.1.93.tar.gz", hash = "sha256:285b6ad3a54f50fa8eb97b5f600acc57d0e37e139dd8cf2111a117d0435ba9b4"}, + {file = "langsmith-0.1.101-py3-none-any.whl", hash = "sha256:572e2c90709cda1ad837ac86cedda7295f69933f2124c658a92a35fb890477cc"}, + {file = "langsmith-0.1.101.tar.gz", hash = "sha256:caf4d95f314bb6cd3c4e0632eed821fd5cd5d0f18cb824772fce6d7a9113895b"}, ] [package.dependencies] +httpx = ">=0.23.0,<1" orjson = ">=3.9.14,<4.0.0" pydantic = [ {version = ">=1,<3", markers = "python_full_version < \"3.12.4\""}, @@ -1319,153 +1335,149 @@ tesseract = ["pytesseract"] [[package]] name = "lxml" -version = "5.2.2" +version = "5.3.0" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." optional = false python-versions = ">=3.6" files = [ - {file = "lxml-5.2.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:364d03207f3e603922d0d3932ef363d55bbf48e3647395765f9bfcbdf6d23632"}, - {file = "lxml-5.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:50127c186f191b8917ea2fb8b206fbebe87fd414a6084d15568c27d0a21d60db"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74e4f025ef3db1c6da4460dd27c118d8cd136d0391da4e387a15e48e5c975147"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:981a06a3076997adf7c743dcd0d7a0415582661e2517c7d961493572e909aa1d"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aef5474d913d3b05e613906ba4090433c515e13ea49c837aca18bde190853dff"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1e275ea572389e41e8b039ac076a46cb87ee6b8542df3fff26f5baab43713bca"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5b65529bb2f21ac7861a0e94fdbf5dc0daab41497d18223b46ee8515e5ad297"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:bcc98f911f10278d1daf14b87d65325851a1d29153caaf146877ec37031d5f36"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:b47633251727c8fe279f34025844b3b3a3e40cd1b198356d003aa146258d13a2"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:fbc9d316552f9ef7bba39f4edfad4a734d3d6f93341232a9dddadec4f15d425f"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:13e69be35391ce72712184f69000cda04fc89689429179bc4c0ae5f0b7a8c21b"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3b6a30a9ab040b3f545b697cb3adbf3696c05a3a68aad172e3fd7ca73ab3c835"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:a233bb68625a85126ac9f1fc66d24337d6e8a0f9207b688eec2e7c880f012ec0"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:dfa7c241073d8f2b8e8dbc7803c434f57dbb83ae2a3d7892dd068d99e96efe2c"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1a7aca7964ac4bb07680d5c9d63b9d7028cace3e2d43175cb50bba8c5ad33316"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ae4073a60ab98529ab8a72ebf429f2a8cc612619a8c04e08bed27450d52103c0"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = 
"sha256:ffb2be176fed4457e445fe540617f0252a72a8bc56208fd65a690fdb1f57660b"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:e290d79a4107d7d794634ce3e985b9ae4f920380a813717adf61804904dc4393"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:96e85aa09274955bb6bd483eaf5b12abadade01010478154b0ec70284c1b1526"}, - {file = "lxml-5.2.2-cp310-cp310-win32.whl", hash = "sha256:f956196ef61369f1685d14dad80611488d8dc1ef00be57c0c5a03064005b0f30"}, - {file = "lxml-5.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:875a3f90d7eb5c5d77e529080d95140eacb3c6d13ad5b616ee8095447b1d22e7"}, - {file = "lxml-5.2.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:45f9494613160d0405682f9eee781c7e6d1bf45f819654eb249f8f46a2c22545"}, - {file = "lxml-5.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b0b3f2df149efb242cee2ffdeb6674b7f30d23c9a7af26595099afaf46ef4e88"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d28cb356f119a437cc58a13f8135ab8a4c8ece18159eb9194b0d269ec4e28083"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:657a972f46bbefdbba2d4f14413c0d079f9ae243bd68193cb5061b9732fa54c1"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b74b9ea10063efb77a965a8d5f4182806fbf59ed068b3c3fd6f30d2ac7bee734"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:07542787f86112d46d07d4f3c4e7c760282011b354d012dc4141cc12a68cef5f"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:303f540ad2dddd35b92415b74b900c749ec2010e703ab3bfd6660979d01fd4ed"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2eb2227ce1ff998faf0cd7fe85bbf086aa41dfc5af3b1d80867ecfe75fb68df3"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:1d8a701774dfc42a2f0b8ccdfe7dbc140500d1049e0632a611985d943fcf12df"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:56793b7a1a091a7c286b5f4aa1fe4ae5d1446fe742d00cdf2ffb1077865db10d"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:eb00b549b13bd6d884c863554566095bf6fa9c3cecb2e7b399c4bc7904cb33b5"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a2569a1f15ae6c8c64108a2cd2b4a858fc1e13d25846be0666fc144715e32ab"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:8cf85a6e40ff1f37fe0f25719aadf443686b1ac7652593dc53c7ef9b8492b115"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:d237ba6664b8e60fd90b8549a149a74fcc675272e0e95539a00522e4ca688b04"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0b3f5016e00ae7630a4b83d0868fca1e3d494c78a75b1c7252606a3a1c5fc2ad"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:23441e2b5339bc54dc949e9e675fa35efe858108404ef9aa92f0456929ef6fe8"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2fb0ba3e8566548d6c8e7dd82a8229ff47bd8fb8c2da237607ac8e5a1b8312e5"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:79d1fb9252e7e2cfe4de6e9a6610c7cbb99b9708e2c3e29057f487de5a9eaefa"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6dcc3d17eac1df7859ae01202e9bb11ffa8c98949dcbeb1069c8b9a75917e01b"}, - {file = 
"lxml-5.2.2-cp311-cp311-win32.whl", hash = "sha256:4c30a2f83677876465f44c018830f608fa3c6a8a466eb223535035fbc16f3438"}, - {file = "lxml-5.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:49095a38eb333aaf44c06052fd2ec3b8f23e19747ca7ec6f6c954ffea6dbf7be"}, - {file = "lxml-5.2.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7429e7faa1a60cad26ae4227f4dd0459efde239e494c7312624ce228e04f6391"}, - {file = "lxml-5.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:50ccb5d355961c0f12f6cf24b7187dbabd5433f29e15147a67995474f27d1776"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc911208b18842a3a57266d8e51fc3cfaccee90a5351b92079beed912a7914c2"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33ce9e786753743159799fdf8e92a5da351158c4bfb6f2db0bf31e7892a1feb5"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ec87c44f619380878bd49ca109669c9f221d9ae6883a5bcb3616785fa8f94c97"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08ea0f606808354eb8f2dfaac095963cb25d9d28e27edcc375d7b30ab01abbf6"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75a9632f1d4f698b2e6e2e1ada40e71f369b15d69baddb8968dcc8e683839b18"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:74da9f97daec6928567b48c90ea2c82a106b2d500f397eeb8941e47d30b1ca85"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:0969e92af09c5687d769731e3f39ed62427cc72176cebb54b7a9d52cc4fa3b73"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:9164361769b6ca7769079f4d426a41df6164879f7f3568be9086e15baca61466"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:d26a618ae1766279f2660aca0081b2220aca6bd1aa06b2cf73f07383faf48927"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab67ed772c584b7ef2379797bf14b82df9aa5f7438c5b9a09624dd834c1c1aaf"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:3d1e35572a56941b32c239774d7e9ad724074d37f90c7a7d499ab98761bd80cf"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:8268cbcd48c5375f46e000adb1390572c98879eb4f77910c6053d25cc3ac2c67"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e282aedd63c639c07c3857097fc0e236f984ceb4089a8b284da1c526491e3f3d"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfdc2bfe69e9adf0df4915949c22a25b39d175d599bf98e7ddf620a13678585"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4aefd911793b5d2d7a921233a54c90329bf3d4a6817dc465f12ffdfe4fc7b8fe"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:8b8df03a9e995b6211dafa63b32f9d405881518ff1ddd775db4e7b98fb545e1c"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f11ae142f3a322d44513de1018b50f474f8f736bc3cd91d969f464b5bfef8836"}, - {file = "lxml-5.2.2-cp312-cp312-win32.whl", hash = "sha256:16a8326e51fcdffc886294c1e70b11ddccec836516a343f9ed0f82aac043c24a"}, - {file = "lxml-5.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:bbc4b80af581e18568ff07f6395c02114d05f4865c2812a1f02f2eaecf0bfd48"}, - {file = "lxml-5.2.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e3d9d13603410b72787579769469af730c38f2f25505573a5888a94b62b920f8"}, - {file = 
"lxml-5.2.2-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38b67afb0a06b8575948641c1d6d68e41b83a3abeae2ca9eed2ac59892b36706"}, - {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c689d0d5381f56de7bd6966a4541bff6e08bf8d3871bbd89a0c6ab18aa699573"}, - {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:cf2a978c795b54c539f47964ec05e35c05bd045db5ca1e8366988c7f2fe6b3ce"}, - {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:739e36ef7412b2bd940f75b278749106e6d025e40027c0b94a17ef7968d55d56"}, - {file = "lxml-5.2.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d8bbcd21769594dbba9c37d3c819e2d5847656ca99c747ddb31ac1701d0c0ed9"}, - {file = "lxml-5.2.2-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:2304d3c93f2258ccf2cf7a6ba8c761d76ef84948d87bf9664e14d203da2cd264"}, - {file = "lxml-5.2.2-cp36-cp36m-win32.whl", hash = "sha256:02437fb7308386867c8b7b0e5bc4cd4b04548b1c5d089ffb8e7b31009b961dc3"}, - {file = "lxml-5.2.2-cp36-cp36m-win_amd64.whl", hash = "sha256:edcfa83e03370032a489430215c1e7783128808fd3e2e0a3225deee278585196"}, - {file = "lxml-5.2.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:28bf95177400066596cdbcfc933312493799382879da504633d16cf60bba735b"}, - {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a745cc98d504d5bd2c19b10c79c61c7c3df9222629f1b6210c0368177589fb8"}, - {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b590b39ef90c6b22ec0be925b211298e810b4856909c8ca60d27ffbca6c12e6"}, - {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b336b0416828022bfd5a2e3083e7f5ba54b96242159f83c7e3eebaec752f1716"}, - {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:c2faf60c583af0d135e853c86ac2735ce178f0e338a3c7f9ae8f622fd2eb788c"}, - {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:4bc6cb140a7a0ad1f7bc37e018d0ed690b7b6520ade518285dc3171f7a117905"}, - {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7ff762670cada8e05b32bf1e4dc50b140790909caa8303cfddc4d702b71ea184"}, - {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:57f0a0bbc9868e10ebe874e9f129d2917750adf008fe7b9c1598c0fbbfdde6a6"}, - {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:a6d2092797b388342c1bc932077ad232f914351932353e2e8706851c870bca1f"}, - {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:60499fe961b21264e17a471ec296dcbf4365fbea611bf9e303ab69db7159ce61"}, - {file = "lxml-5.2.2-cp37-cp37m-win32.whl", hash = "sha256:d9b342c76003c6b9336a80efcc766748a333573abf9350f4094ee46b006ec18f"}, - {file = "lxml-5.2.2-cp37-cp37m-win_amd64.whl", hash = "sha256:b16db2770517b8799c79aa80f4053cd6f8b716f21f8aca962725a9565ce3ee40"}, - {file = "lxml-5.2.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7ed07b3062b055d7a7f9d6557a251cc655eed0b3152b76de619516621c56f5d3"}, - {file = "lxml-5.2.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f60fdd125d85bf9c279ffb8e94c78c51b3b6a37711464e1f5f31078b45002421"}, - {file = "lxml-5.2.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a7e24cb69ee5f32e003f50e016d5fde438010c1022c96738b04fc2423e61706"}, - {file = 
"lxml-5.2.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23cfafd56887eaed93d07bc4547abd5e09d837a002b791e9767765492a75883f"}, - {file = "lxml-5.2.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:19b4e485cd07b7d83e3fe3b72132e7df70bfac22b14fe4bf7a23822c3a35bff5"}, - {file = "lxml-5.2.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:7ce7ad8abebe737ad6143d9d3bf94b88b93365ea30a5b81f6877ec9c0dee0a48"}, - {file = "lxml-5.2.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e49b052b768bb74f58c7dda4e0bdf7b79d43a9204ca584ffe1fb48a6f3c84c66"}, - {file = "lxml-5.2.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d14a0d029a4e176795cef99c056d58067c06195e0c7e2dbb293bf95c08f772a3"}, - {file = "lxml-5.2.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:be49ad33819d7dcc28a309b86d4ed98e1a65f3075c6acd3cd4fe32103235222b"}, - {file = "lxml-5.2.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:a6d17e0370d2516d5bb9062c7b4cb731cff921fc875644c3d751ad857ba9c5b1"}, - {file = "lxml-5.2.2-cp38-cp38-win32.whl", hash = "sha256:5b8c041b6265e08eac8a724b74b655404070b636a8dd6d7a13c3adc07882ef30"}, - {file = "lxml-5.2.2-cp38-cp38-win_amd64.whl", hash = "sha256:f61efaf4bed1cc0860e567d2ecb2363974d414f7f1f124b1df368bbf183453a6"}, - {file = "lxml-5.2.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:fb91819461b1b56d06fa4bcf86617fac795f6a99d12239fb0c68dbeba41a0a30"}, - {file = "lxml-5.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d4ed0c7cbecde7194cd3228c044e86bf73e30a23505af852857c09c24e77ec5d"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54401c77a63cc7d6dc4b4e173bb484f28a5607f3df71484709fe037c92d4f0ed"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:625e3ef310e7fa3a761d48ca7ea1f9d8718a32b1542e727d584d82f4453d5eeb"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:519895c99c815a1a24a926d5b60627ce5ea48e9f639a5cd328bda0515ea0f10c"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c7079d5eb1c1315a858bbf180000757db8ad904a89476653232db835c3114001"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:343ab62e9ca78094f2306aefed67dcfad61c4683f87eee48ff2fd74902447726"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:cd9e78285da6c9ba2d5c769628f43ef66d96ac3085e59b10ad4f3707980710d3"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:546cf886f6242dff9ec206331209db9c8e1643ae642dea5fdbecae2453cb50fd"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:02f6a8eb6512fdc2fd4ca10a49c341c4e109aa6e9448cc4859af5b949622715a"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:339ee4a4704bc724757cd5dd9dc8cf4d00980f5d3e6e06d5847c1b594ace68ab"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0a028b61a2e357ace98b1615fc03f76eb517cc028993964fe08ad514b1e8892d"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f90e552ecbad426eab352e7b2933091f2be77115bb16f09f78404861c8322981"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:d83e2d94b69bf31ead2fa45f0acdef0757fa0458a129734f59f67f3d2eb7ef32"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a02d3c48f9bb1e10c7788d92c0c7db6f2002d024ab6e74d6f45ae33e3d0288a3"}, - 
{file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6d68ce8e7b2075390e8ac1e1d3a99e8b6372c694bbe612632606d1d546794207"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:453d037e09a5176d92ec0fd282e934ed26d806331a8b70ab431a81e2fbabf56d"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:3b019d4ee84b683342af793b56bb35034bd749e4cbdd3d33f7d1107790f8c472"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cb3942960f0beb9f46e2a71a3aca220d1ca32feb5a398656be934320804c0df9"}, - {file = "lxml-5.2.2-cp39-cp39-win32.whl", hash = "sha256:ac6540c9fff6e3813d29d0403ee7a81897f1d8ecc09a8ff84d2eea70ede1cdbf"}, - {file = "lxml-5.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:610b5c77428a50269f38a534057444c249976433f40f53e3b47e68349cca1425"}, - {file = "lxml-5.2.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b537bd04d7ccd7c6350cdaaaad911f6312cbd61e6e6045542f781c7f8b2e99d2"}, - {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4820c02195d6dfb7b8508ff276752f6b2ff8b64ae5d13ebe02e7667e035000b9"}, - {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a09f6184f17a80897172863a655467da2b11151ec98ba8d7af89f17bf63dae"}, - {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:76acba4c66c47d27c8365e7c10b3d8016a7da83d3191d053a58382311a8bf4e1"}, - {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b128092c927eaf485928cec0c28f6b8bead277e28acf56800e972aa2c2abd7a2"}, - {file = "lxml-5.2.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ae791f6bd43305aade8c0e22f816b34f3b72b6c820477aab4d18473a37e8090b"}, - {file = "lxml-5.2.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a2f6a1bc2460e643785a2cde17293bd7a8f990884b822f7bca47bee0a82fc66b"}, - {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e8d351ff44c1638cb6e980623d517abd9f580d2e53bfcd18d8941c052a5a009"}, - {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bec4bd9133420c5c52d562469c754f27c5c9e36ee06abc169612c959bd7dbb07"}, - {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:55ce6b6d803890bd3cc89975fca9de1dff39729b43b73cb15ddd933b8bc20484"}, - {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8ab6a358d1286498d80fe67bd3d69fcbc7d1359b45b41e74c4a26964ca99c3f8"}, - {file = "lxml-5.2.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:06668e39e1f3c065349c51ac27ae430719d7806c026fec462e5693b08b95696b"}, - {file = "lxml-5.2.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9cd5323344d8ebb9fb5e96da5de5ad4ebab993bbf51674259dbe9d7a18049525"}, - {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89feb82ca055af0fe797a2323ec9043b26bc371365847dbe83c7fd2e2f181c34"}, - {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e481bba1e11ba585fb06db666bfc23dbe181dbafc7b25776156120bf12e0d5a6"}, - {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9d6c6ea6a11ca0ff9cd0390b885984ed31157c168565702959c25e2191674a14"}, - {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3d98de734abee23e61f6b8c2e08a88453ada7d6486dc7cdc82922a03968928db"}, - {file = "lxml-5.2.2-pp38-pypy38_pp73-win_amd64.whl", hash = 
"sha256:69ab77a1373f1e7563e0fb5a29a8440367dec051da6c7405333699d07444f511"}, - {file = "lxml-5.2.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:34e17913c431f5ae01d8658dbf792fdc457073dcdfbb31dc0cc6ab256e664a8d"}, - {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05f8757b03208c3f50097761be2dea0aba02e94f0dc7023ed73a7bb14ff11eb0"}, - {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a520b4f9974b0a0a6ed73c2154de57cdfd0c8800f4f15ab2b73238ffed0b36e"}, - {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5e097646944b66207023bc3c634827de858aebc226d5d4d6d16f0b77566ea182"}, - {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b5e4ef22ff25bfd4ede5f8fb30f7b24446345f3e79d9b7455aef2836437bc38a"}, - {file = "lxml-5.2.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ff69a9a0b4b17d78170c73abe2ab12084bdf1691550c5629ad1fe7849433f324"}, - {file = "lxml-5.2.2.tar.gz", hash = "sha256:bb2dc4898180bea79863d5487e5f9c7c34297414bad54bcd0f0852aee9cfdb87"}, + {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:dd36439be765e2dde7660212b5275641edbc813e7b24668831a5c8ac91180656"}, + {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ae5fe5c4b525aa82b8076c1a59d642c17b6e8739ecf852522c6321852178119d"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:501d0d7e26b4d261fca8132854d845e4988097611ba2531408ec91cf3fd9d20a"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb66442c2546446944437df74379e9cf9e9db353e61301d1a0e26482f43f0dd8"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9e41506fec7a7f9405b14aa2d5c8abbb4dbbd09d88f9496958b6d00cb4d45330"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f7d4a670107d75dfe5ad080bed6c341d18c4442f9378c9f58e5851e86eb79965"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41ce1f1e2c7755abfc7e759dc34d7d05fd221723ff822947132dc934d122fe22"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:44264ecae91b30e5633013fb66f6ddd05c006d3e0e884f75ce0b4755b3e3847b"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:3c174dc350d3ec52deb77f2faf05c439331d6ed5e702fc247ccb4e6b62d884b7"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:2dfab5fa6a28a0b60a20638dc48e6343c02ea9933e3279ccb132f555a62323d8"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b1c8c20847b9f34e98080da785bb2336ea982e7f913eed5809e5a3c872900f32"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2c86bf781b12ba417f64f3422cfc302523ac9cd1d8ae8c0f92a1c66e56ef2e86"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:c162b216070f280fa7da844531169be0baf9ccb17263cf5a8bf876fcd3117fa5"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:36aef61a1678cb778097b4a6eeae96a69875d51d1e8f4d4b491ab3cfb54b5a03"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f65e5120863c2b266dbcc927b306c5b78e502c71edf3295dfcb9501ec96e5fc7"}, + {file = "lxml-5.3.0-cp310-cp310-win32.whl", hash = 
"sha256:ef0c1fe22171dd7c7c27147f2e9c3e86f8bdf473fed75f16b0c2e84a5030ce80"}, + {file = "lxml-5.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:052d99051e77a4f3e8482c65014cf6372e61b0a6f4fe9edb98503bb5364cfee3"}, + {file = "lxml-5.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:74bcb423462233bc5d6066e4e98b0264e7c1bed7541fff2f4e34fe6b21563c8b"}, + {file = "lxml-5.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a3d819eb6f9b8677f57f9664265d0a10dd6551d227afb4af2b9cd7bdc2ccbf18"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b8f5db71b28b8c404956ddf79575ea77aa8b1538e8b2ef9ec877945b3f46442"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3406b63232fc7e9b8783ab0b765d7c59e7c59ff96759d8ef9632fca27c7ee4"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ecdd78ab768f844c7a1d4a03595038c166b609f6395e25af9b0f3f26ae1230f"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:168f2dfcfdedf611eb285efac1516c8454c8c99caf271dccda8943576b67552e"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa617107a410245b8660028a7483b68e7914304a6d4882b5ff3d2d3eb5948d8c"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:69959bd3167b993e6e710b99051265654133a98f20cec1d9b493b931942e9c16"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:bd96517ef76c8654446fc3db9242d019a1bb5fe8b751ba414765d59f99210b79"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:ab6dd83b970dc97c2d10bc71aa925b84788c7c05de30241b9e96f9b6d9ea3080"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:eec1bb8cdbba2925bedc887bc0609a80e599c75b12d87ae42ac23fd199445654"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6a7095eeec6f89111d03dabfe5883a1fd54da319c94e0fb104ee8f23616b572d"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6f651ebd0b21ec65dfca93aa629610a0dbc13dbc13554f19b0113da2e61a4763"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:f422a209d2455c56849442ae42f25dbaaba1c6c3f501d58761c619c7836642ec"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:62f7fdb0d1ed2065451f086519865b4c90aa19aed51081979ecd05a21eb4d1be"}, + {file = "lxml-5.3.0-cp311-cp311-win32.whl", hash = "sha256:c6379f35350b655fd817cd0d6cbeef7f265f3ae5fedb1caae2eb442bbeae9ab9"}, + {file = "lxml-5.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:9c52100e2c2dbb0649b90467935c4b0de5528833c76a35ea1a2691ec9f1ee7a1"}, + {file = "lxml-5.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:e99f5507401436fdcc85036a2e7dc2e28d962550afe1cbfc07c40e454256a859"}, + {file = "lxml-5.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:384aacddf2e5813a36495233b64cb96b1949da72bef933918ba5c84e06af8f0e"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:874a216bf6afaf97c263b56371434e47e2c652d215788396f60477540298218f"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65ab5685d56914b9a2a34d67dd5488b83213d680b0c5d10b47f81da5a16b0b0e"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:aac0bbd3e8dd2d9c45ceb82249e8bdd3ac99131a32b4d35c8af3cc9db1657179"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b369d3db3c22ed14c75ccd5af429086f166a19627e84a8fdade3f8f31426e52a"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c24037349665434f375645fa9d1f5304800cec574d0310f618490c871fd902b3"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:62d172f358f33a26d6b41b28c170c63886742f5b6772a42b59b4f0fa10526cb1"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:c1f794c02903c2824fccce5b20c339a1a14b114e83b306ff11b597c5f71a1c8d"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:5d6a6972b93c426ace71e0be9a6f4b2cfae9b1baed2eed2006076a746692288c"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:3879cc6ce938ff4eb4900d901ed63555c778731a96365e53fadb36437a131a99"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:74068c601baff6ff021c70f0935b0c7bc528baa8ea210c202e03757c68c5a4ff"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ecd4ad8453ac17bc7ba3868371bffb46f628161ad0eefbd0a855d2c8c32dd81a"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7e2f58095acc211eb9d8b5771bf04df9ff37d6b87618d1cbf85f92399c98dae8"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e63601ad5cd8f860aa99d109889b5ac34de571c7ee902d6812d5d9ddcc77fa7d"}, + {file = "lxml-5.3.0-cp312-cp312-win32.whl", hash = "sha256:17e8d968d04a37c50ad9c456a286b525d78c4a1c15dd53aa46c1d8e06bf6fa30"}, + {file = "lxml-5.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:c1a69e58a6bb2de65902051d57fde951febad631a20a64572677a1052690482f"}, + {file = "lxml-5.3.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8c72e9563347c7395910de6a3100a4840a75a6f60e05af5e58566868d5eb2d6a"}, + {file = "lxml-5.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e92ce66cd919d18d14b3856906a61d3f6b6a8500e0794142338da644260595cd"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d04f064bebdfef9240478f7a779e8c5dc32b8b7b0b2fc6a62e39b928d428e51"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c2fb570d7823c2bbaf8b419ba6e5662137f8166e364a8b2b91051a1fb40ab8b"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c120f43553ec759f8de1fee2f4794452b0946773299d44c36bfe18e83caf002"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:562e7494778a69086f0312ec9689f6b6ac1c6b65670ed7d0267e49f57ffa08c4"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:423b121f7e6fa514ba0c7918e56955a1d4470ed35faa03e3d9f0e3baa4c7e492"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:c00f323cc00576df6165cc9d21a4c21285fa6b9989c5c39830c3903dc4303ef3"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = "sha256:1fdc9fae8dd4c763e8a31e7630afef517eab9f5d5d31a278df087f307bf601f4"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:658f2aa69d31e09699705949b5fc4719cbecbd4a97f9656a232e7d6c7be1a367"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = 
"sha256:1473427aff3d66a3fa2199004c3e601e6c4500ab86696edffdbc84954c72d832"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a87de7dd873bf9a792bf1e58b1c3887b9264036629a5bf2d2e6579fe8e73edff"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:0d7b36afa46c97875303a94e8f3ad932bf78bace9e18e603f2085b652422edcd"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:cf120cce539453ae086eacc0130a324e7026113510efa83ab42ef3fcfccac7fb"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:df5c7333167b9674aa8ae1d4008fa4bc17a313cc490b2cca27838bbdcc6bb15b"}, + {file = "lxml-5.3.0-cp313-cp313-win32.whl", hash = "sha256:c802e1c2ed9f0c06a65bc4ed0189d000ada8049312cfeab6ca635e39c9608957"}, + {file = "lxml-5.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:406246b96d552e0503e17a1006fd27edac678b3fcc9f1be71a2f94b4ff61528d"}, + {file = "lxml-5.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:8f0de2d390af441fe8b2c12626d103540b5d850d585b18fcada58d972b74a74e"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1afe0a8c353746e610bd9031a630a95bcfb1a720684c3f2b36c4710a0a96528f"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56b9861a71575f5795bde89256e7467ece3d339c9b43141dbdd54544566b3b94"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:9fb81d2824dff4f2e297a276297e9031f46d2682cafc484f49de182aa5e5df99"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:2c226a06ecb8cdef28845ae976da407917542c5e6e75dcac7cc33eb04aaeb237"}, + {file = "lxml-5.3.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:7d3d1ca42870cdb6d0d29939630dbe48fa511c203724820fc0fd507b2fb46577"}, + {file = "lxml-5.3.0-cp36-cp36m-win32.whl", hash = "sha256:094cb601ba9f55296774c2d57ad68730daa0b13dc260e1f941b4d13678239e70"}, + {file = "lxml-5.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:eafa2c8658f4e560b098fe9fc54539f86528651f61849b22111a9b107d18910c"}, + {file = "lxml-5.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cb83f8a875b3d9b458cada4f880fa498646874ba4011dc974e071a0a84a1b033"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25f1b69d41656b05885aa185f5fdf822cb01a586d1b32739633679699f220391"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23e0553b8055600b3bf4a00b255ec5c92e1e4aebf8c2c09334f8368e8bd174d6"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ada35dd21dc6c039259596b358caab6b13f4db4d4a7f8665764d616daf9cc1d"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:81b4e48da4c69313192d8c8d4311e5d818b8be1afe68ee20f6385d0e96fc9512"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:2bc9fd5ca4729af796f9f59cd8ff160fe06a474da40aca03fcc79655ddee1a8b"}, + {file = "lxml-5.3.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:07da23d7ee08577760f0a71d67a861019103e4812c87e2fab26b039054594cc5"}, + {file = "lxml-5.3.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:ea2e2f6f801696ad7de8aec061044d6c8c0dd4037608c7cab38a9a4d316bfb11"}, + {file = "lxml-5.3.0-cp37-cp37m-win32.whl", hash = "sha256:5c54afdcbb0182d06836cc3d1be921e540be3ebdf8b8a51ee3ef987537455f84"}, + {file = "lxml-5.3.0-cp37-cp37m-win_amd64.whl", 
hash = "sha256:f2901429da1e645ce548bf9171784c0f74f0718c3f6150ce166be39e4dd66c3e"}, + {file = "lxml-5.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c56a1d43b2f9ee4786e4658c7903f05da35b923fb53c11025712562d5cc02753"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ee8c39582d2652dcd516d1b879451500f8db3fe3607ce45d7c5957ab2596040"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fdf3a3059611f7585a78ee10399a15566356116a4288380921a4b598d807a22"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:146173654d79eb1fc97498b4280c1d3e1e5d58c398fa530905c9ea50ea849b22"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:0a7056921edbdd7560746f4221dca89bb7a3fe457d3d74267995253f46343f15"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:9e4b47ac0f5e749cfc618efdf4726269441014ae1d5583e047b452a32e221920"}, + {file = "lxml-5.3.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:f914c03e6a31deb632e2daa881fe198461f4d06e57ac3d0e05bbcab8eae01945"}, + {file = "lxml-5.3.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:213261f168c5e1d9b7535a67e68b1f59f92398dd17a56d934550837143f79c42"}, + {file = "lxml-5.3.0-cp38-cp38-win32.whl", hash = "sha256:218c1b2e17a710e363855594230f44060e2025b05c80d1f0661258142b2add2e"}, + {file = "lxml-5.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:315f9542011b2c4e1d280e4a20ddcca1761993dda3afc7a73b01235f8641e903"}, + {file = "lxml-5.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1ffc23010330c2ab67fac02781df60998ca8fe759e8efde6f8b756a20599c5de"}, + {file = "lxml-5.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2b3778cb38212f52fac9fe913017deea2fdf4eb1a4f8e4cfc6b009a13a6d3fcc"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b0c7a688944891086ba192e21c5229dea54382f4836a209ff8d0a660fac06be"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:747a3d3e98e24597981ca0be0fd922aebd471fa99d0043a3842d00cdcad7ad6a"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86a6b24b19eaebc448dc56b87c4865527855145d851f9fc3891673ff97950540"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b11a5d918a6216e521c715b02749240fb07ae5a1fefd4b7bf12f833bc8b4fe70"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68b87753c784d6acb8a25b05cb526c3406913c9d988d51f80adecc2b0775d6aa"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:109fa6fede314cc50eed29e6e56c540075e63d922455346f11e4d7a036d2b8cf"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:02ced472497b8362c8e902ade23e3300479f4f43e45f4105c85ef43b8db85229"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:6b038cc86b285e4f9fea2ba5ee76e89f21ed1ea898e287dc277a25884f3a7dfe"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:7437237c6a66b7ca341e868cda48be24b8701862757426852c9b3186de1da8a2"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7f41026c1d64043a36fda21d64c5026762d53a77043e73e94b71f0521939cc71"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = 
"sha256:482c2f67761868f0108b1743098640fbb2a28a8e15bf3f47ada9fa59d9fe08c3"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:1483fd3358963cc5c1c9b122c80606a3a79ee0875bcac0204149fa09d6ff2727"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2dec2d1130a9cda5b904696cec33b2cfb451304ba9081eeda7f90f724097300a"}, + {file = "lxml-5.3.0-cp39-cp39-win32.whl", hash = "sha256:a0eabd0a81625049c5df745209dc7fcef6e2aea7793e5f003ba363610aa0a3ff"}, + {file = "lxml-5.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:89e043f1d9d341c52bf2af6d02e6adde62e0a46e6755d5eb60dc6e4f0b8aeca2"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7b1cd427cb0d5f7393c31b7496419da594fe600e6fdc4b105a54f82405e6626c"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51806cfe0279e06ed8500ce19479d757db42a30fd509940b1701be9c86a5ff9a"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee70d08fd60c9565ba8190f41a46a54096afa0eeb8f76bd66f2c25d3b1b83005"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:8dc2c0395bea8254d8daebc76dcf8eb3a95ec2a46fa6fae5eaccee366bfe02ce"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6ba0d3dcac281aad8a0e5b14c7ed6f9fa89c8612b47939fc94f80b16e2e9bc83"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:6e91cf736959057f7aac7adfc83481e03615a8e8dd5758aa1d95ea69e8931dba"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:94d6c3782907b5e40e21cadf94b13b0842ac421192f26b84c45f13f3c9d5dc27"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c300306673aa0f3ed5ed9372b21867690a17dba38c68c44b287437c362ce486b"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78d9b952e07aed35fe2e1a7ad26e929595412db48535921c5013edc8aa4a35ce"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:01220dca0d066d1349bd6a1726856a78f7929f3878f7e2ee83c296c69495309e"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2d9b8d9177afaef80c53c0a9e30fa252ff3036fb1c6494d427c066a4ce6a282f"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:20094fc3f21ea0a8669dc4c61ed7fa8263bd37d97d93b90f28fc613371e7a875"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ace2c2326a319a0bb8a8b0e5b570c764962e95818de9f259ce814ee666603f19"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92e67a0be1639c251d21e35fe74df6bcc40cba445c2cda7c4a967656733249e2"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd5350b55f9fecddc51385463a4f67a5da829bc741e38cf689f38ec9023f54ab"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c1fefd7e3d00921c44dc9ca80a775af49698bbfd92ea84498e56acffd4c5469"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:71a8dd38fbd2f2319136d4ae855a7078c69c9a38ae06e0c17c73fd70fc6caad8"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:97acf1e1fd66ab53dacd2c35b319d7e548380c2e9e8c54525c6e76d21b1ae3b1"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = 
"sha256:68934b242c51eb02907c5b81d138cb977b2129a0a75a8f8b60b01cb8586c7b21"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b710bc2b8292966b23a6a0121f7a6c51d45d2347edcc75f016ac123b8054d3f2"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18feb4b93302091b1541221196a2155aa296c363fd233814fa11e181adebc52f"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3eb44520c4724c2e1a57c0af33a379eee41792595023f367ba3952a2d96c2aab"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:609251a0ca4770e5a8768ff902aa02bf636339c5a93f9349b48eb1f606f7f3e9"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:516f491c834eb320d6c843156440fe7fc0d50b33e44387fcec5b02f0bc118a4c"}, + {file = "lxml-5.3.0.tar.gz", hash = "sha256:4e109ca30d1edec1ac60cdbe341905dc3b8f55b16855e03a54aaf59e51ec8c6f"}, ] [package.extras] @@ -1473,17 +1485,17 @@ cssselect = ["cssselect (>=0.7)"] html-clean = ["lxml-html-clean"] html5 = ["html5lib"] htmlsoup = ["BeautifulSoup4"] -source = ["Cython (>=3.0.10)"] +source = ["Cython (>=3.0.11)"] [[package]] name = "markdown" -version = "3.6" +version = "3.7" description = "Python implementation of John Gruber's Markdown." optional = false python-versions = ">=3.8" files = [ - {file = "Markdown-3.6-py3-none-any.whl", hash = "sha256:48f276f4d8cfb8ce6527c8f79e2ee29708508bf4d40aa410fbc3b4ee832c850f"}, - {file = "Markdown-3.6.tar.gz", hash = "sha256:ed4f41f6daecbeeb96e576ce414c41d2d876daa9a16cb35fa8ed8c2ddfad0224"}, + {file = "Markdown-3.7-py3-none-any.whl", hash = "sha256:7eb6df5690b81a1d7942992c97fad2938e956e79df20cbc6186e9c3a77b1c803"}, + {file = "markdown-3.7.tar.gz", hash = "sha256:2ae2471477cfd02dbbf038d5d9bc226d40def84b4fe2986e49b59b6b472bbed2"}, ] [package.dependencies] @@ -1564,13 +1576,13 @@ files = [ [[package]] name = "marshmallow" -version = "3.21.3" +version = "3.22.0" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
optional = false python-versions = ">=3.8" files = [ - {file = "marshmallow-3.21.3-py3-none-any.whl", hash = "sha256:86ce7fb914aa865001a4b2092c4c2872d13bc347f3d42673272cabfdbad386f1"}, - {file = "marshmallow-3.21.3.tar.gz", hash = "sha256:4f57c5e050a54d66361e826f94fba213eb10b67b2fdb02c3e0343ce207ba1662"}, + {file = "marshmallow-3.22.0-py3-none-any.whl", hash = "sha256:71a2dce49ef901c3f97ed296ae5051135fd3febd2bf43afe0ae9a82143a494d9"}, + {file = "marshmallow-3.22.0.tar.gz", hash = "sha256:4972f529104a220bb8637d595aa4c9762afbe7f7a77d82dc58c1615d70c5823e"}, ] [package.dependencies] @@ -1578,45 +1590,56 @@ packaging = ">=17.0" [package.extras] dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"] -docs = ["alabaster (==0.7.16)", "autodocsumm (==0.2.12)", "sphinx (==7.3.7)", "sphinx-issues (==4.1.0)", "sphinx-version-warning (==1.1.2)"] +docs = ["alabaster (==1.0.0)", "autodocsumm (==0.2.13)", "sphinx (==8.0.2)", "sphinx-issues (==4.1.0)", "sphinx-version-warning (==1.1.2)"] tests = ["pytest", "pytz", "simplejson"] [[package]] name = "matplotlib" -version = "3.9.1" +version = "3.9.2" description = "Python plotting package" optional = false python-versions = ">=3.9" files = [ - {file = "matplotlib-3.9.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:7ccd6270066feb9a9d8e0705aa027f1ff39f354c72a87efe8fa07632f30fc6bb"}, - {file = "matplotlib-3.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:591d3a88903a30a6d23b040c1e44d1afdd0d778758d07110eb7596f811f31842"}, - {file = "matplotlib-3.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd2a59ff4b83d33bca3b5ec58203cc65985367812cb8c257f3e101632be86d92"}, - {file = "matplotlib-3.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fc001516ffcf1a221beb51198b194d9230199d6842c540108e4ce109ac05cc0"}, - {file = "matplotlib-3.9.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:83c6a792f1465d174c86d06f3ae85a8fe36e6f5964633ae8106312ec0921fdf5"}, - {file = "matplotlib-3.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:421851f4f57350bcf0811edd754a708d2275533e84f52f6760b740766c6747a7"}, - {file = "matplotlib-3.9.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:b3fce58971b465e01b5c538f9d44915640c20ec5ff31346e963c9e1cd66fa812"}, - {file = "matplotlib-3.9.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a973c53ad0668c53e0ed76b27d2eeeae8799836fd0d0caaa4ecc66bf4e6676c0"}, - {file = "matplotlib-3.9.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82cd5acf8f3ef43f7532c2f230249720f5dc5dd40ecafaf1c60ac8200d46d7eb"}, - {file = "matplotlib-3.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab38a4f3772523179b2f772103d8030215b318fef6360cb40558f585bf3d017f"}, - {file = "matplotlib-3.9.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2315837485ca6188a4b632c5199900e28d33b481eb083663f6a44cfc8987ded3"}, - {file = "matplotlib-3.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:a0c977c5c382f6696caf0bd277ef4f936da7e2aa202ff66cad5f0ac1428ee15b"}, - {file = "matplotlib-3.9.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:565d572efea2b94f264dd86ef27919515aa6d629252a169b42ce5f570db7f37b"}, - {file = "matplotlib-3.9.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6d397fd8ccc64af2ec0af1f0efc3bacd745ebfb9d507f3f552e8adb689ed730a"}, - {file = "matplotlib-3.9.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26040c8f5121cd1ad712abffcd4b5222a8aec3a0fe40bc8542c94331deb8780d"}, - {file = 
"matplotlib-3.9.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d12cb1837cffaac087ad6b44399d5e22b78c729de3cdae4629e252067b705e2b"}, - {file = "matplotlib-3.9.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0e835c6988edc3d2d08794f73c323cc62483e13df0194719ecb0723b564e0b5c"}, - {file = "matplotlib-3.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:44a21d922f78ce40435cb35b43dd7d573cf2a30138d5c4b709d19f00e3907fd7"}, - {file = "matplotlib-3.9.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:0c584210c755ae921283d21d01f03a49ef46d1afa184134dd0f95b0202ee6f03"}, - {file = "matplotlib-3.9.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:11fed08f34fa682c2b792942f8902e7aefeed400da71f9e5816bea40a7ce28fe"}, - {file = "matplotlib-3.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0000354e32efcfd86bda75729716b92f5c2edd5b947200be9881f0a671565c33"}, - {file = "matplotlib-3.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4db17fea0ae3aceb8e9ac69c7e3051bae0b3d083bfec932240f9bf5d0197a049"}, - {file = "matplotlib-3.9.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:208cbce658b72bf6a8e675058fbbf59f67814057ae78165d8a2f87c45b48d0ff"}, - {file = "matplotlib-3.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:dc23f48ab630474264276be156d0d7710ac6c5a09648ccdf49fef9200d8cbe80"}, - {file = "matplotlib-3.9.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:3fda72d4d472e2ccd1be0e9ccb6bf0d2eaf635e7f8f51d737ed7e465ac020cb3"}, - {file = "matplotlib-3.9.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:84b3ba8429935a444f1fdc80ed930babbe06725bcf09fbeb5c8757a2cd74af04"}, - {file = "matplotlib-3.9.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b918770bf3e07845408716e5bbda17eadfc3fcbd9307dc67f37d6cf834bb3d98"}, - {file = "matplotlib-3.9.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f1f2e5d29e9435c97ad4c36fb6668e89aee13d48c75893e25cef064675038ac9"}, - {file = "matplotlib-3.9.1.tar.gz", hash = "sha256:de06b19b8db95dd33d0dc17c926c7c9ebed9f572074b6fac4f65068a6814d010"}, + {file = "matplotlib-3.9.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:9d78bbc0cbc891ad55b4f39a48c22182e9bdaea7fc0e5dbd364f49f729ca1bbb"}, + {file = "matplotlib-3.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c375cc72229614632c87355366bdf2570c2dac01ac66b8ad048d2dabadf2d0d4"}, + {file = "matplotlib-3.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d94ff717eb2bd0b58fe66380bd8b14ac35f48a98e7c6765117fe67fb7684e64"}, + {file = "matplotlib-3.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab68d50c06938ef28681073327795c5db99bb4666214d2d5f880ed11aeaded66"}, + {file = "matplotlib-3.9.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:65aacf95b62272d568044531e41de26285d54aec8cb859031f511f84bd8b495a"}, + {file = "matplotlib-3.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:3fd595f34aa8a55b7fc8bf9ebea8aa665a84c82d275190a61118d33fbc82ccae"}, + {file = "matplotlib-3.9.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d8dd059447824eec055e829258ab092b56bb0579fc3164fa09c64f3acd478772"}, + {file = "matplotlib-3.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c797dac8bb9c7a3fd3382b16fe8f215b4cf0f22adccea36f1545a6d7be310b41"}, + {file = "matplotlib-3.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d719465db13267bcef19ea8954a971db03b9f48b4647e3860e4bc8e6ed86610f"}, + {file = 
"matplotlib-3.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8912ef7c2362f7193b5819d17dae8629b34a95c58603d781329712ada83f9447"}, + {file = "matplotlib-3.9.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7741f26a58a240f43bee74965c4882b6c93df3e7eb3de160126d8c8f53a6ae6e"}, + {file = "matplotlib-3.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:ae82a14dab96fbfad7965403c643cafe6515e386de723e498cf3eeb1e0b70cc7"}, + {file = "matplotlib-3.9.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ac43031375a65c3196bee99f6001e7fa5bdfb00ddf43379d3c0609bdca042df9"}, + {file = "matplotlib-3.9.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:be0fc24a5e4531ae4d8e858a1a548c1fe33b176bb13eff7f9d0d38ce5112a27d"}, + {file = "matplotlib-3.9.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf81de2926c2db243c9b2cbc3917619a0fc85796c6ba4e58f541df814bbf83c7"}, + {file = "matplotlib-3.9.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6ee45bc4245533111ced13f1f2cace1e7f89d1c793390392a80c139d6cf0e6c"}, + {file = "matplotlib-3.9.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:306c8dfc73239f0e72ac50e5a9cf19cc4e8e331dd0c54f5e69ca8758550f1e1e"}, + {file = "matplotlib-3.9.2-cp312-cp312-win_amd64.whl", hash = "sha256:5413401594cfaff0052f9d8b1aafc6d305b4bd7c4331dccd18f561ff7e1d3bd3"}, + {file = "matplotlib-3.9.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:18128cc08f0d3cfff10b76baa2f296fc28c4607368a8402de61bb3f2eb33c7d9"}, + {file = "matplotlib-3.9.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4876d7d40219e8ae8bb70f9263bcbe5714415acfdf781086601211335e24f8aa"}, + {file = "matplotlib-3.9.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d9f07a80deab4bb0b82858a9e9ad53d1382fd122be8cde11080f4e7dfedb38b"}, + {file = "matplotlib-3.9.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7c0410f181a531ec4e93bbc27692f2c71a15c2da16766f5ba9761e7ae518413"}, + {file = "matplotlib-3.9.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:909645cce2dc28b735674ce0931a4ac94e12f5b13f6bb0b5a5e65e7cea2c192b"}, + {file = "matplotlib-3.9.2-cp313-cp313-win_amd64.whl", hash = "sha256:f32c7410c7f246838a77d6d1eff0c0f87f3cb0e7c4247aebea71a6d5a68cab49"}, + {file = "matplotlib-3.9.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:37e51dd1c2db16ede9cfd7b5cabdfc818b2c6397c83f8b10e0e797501c963a03"}, + {file = "matplotlib-3.9.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b82c5045cebcecd8496a4d694d43f9cc84aeeb49fe2133e036b207abe73f4d30"}, + {file = "matplotlib-3.9.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f053c40f94bc51bc03832a41b4f153d83f2062d88c72b5e79997072594e97e51"}, + {file = "matplotlib-3.9.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbe196377a8248972f5cede786d4c5508ed5f5ca4a1e09b44bda889958b33f8c"}, + {file = "matplotlib-3.9.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5816b1e1fe8c192cbc013f8f3e3368ac56fbecf02fb41b8f8559303f24c5015e"}, + {file = "matplotlib-3.9.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:cef2a73d06601437be399908cf13aee74e86932a5ccc6ccdf173408ebc5f6bb2"}, + {file = "matplotlib-3.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e0830e188029c14e891fadd99702fd90d317df294c3298aad682739c5533721a"}, + {file = "matplotlib-3.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:03ba9c1299c920964e8d3857ba27173b4dbb51ca4bab47ffc2c2ba0eb5e2cbc5"}, + {file = "matplotlib-3.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1cd93b91ab47a3616b4d3c42b52f8363b88ca021e340804c6ab2536344fad9ca"}, + {file = "matplotlib-3.9.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6d1ce5ed2aefcdce11904fc5bbea7d9c21fff3d5f543841edf3dea84451a09ea"}, + {file = "matplotlib-3.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:b2696efdc08648536efd4e1601b5fd491fd47f4db97a5fbfd175549a7365c1b2"}, + {file = "matplotlib-3.9.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:d52a3b618cb1cbb769ce2ee1dcdb333c3ab6e823944e9a2d36e37253815f9556"}, + {file = "matplotlib-3.9.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:039082812cacd6c6bec8e17a9c1e6baca230d4116d522e81e1f63a74d01d2e21"}, + {file = "matplotlib-3.9.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6758baae2ed64f2331d4fd19be38b7b4eae3ecec210049a26b6a4f3ae1c85dcc"}, + {file = "matplotlib-3.9.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:050598c2b29e0b9832cde72bcf97627bf00262adbc4a54e2b856426bb2ef0697"}, + {file = "matplotlib-3.9.2.tar.gz", hash = "sha256:96ab43906269ca64a6366934106fa01534454a69e471b7bf3d79083981aaab92"}, ] [package.dependencies] @@ -1653,38 +1676,38 @@ tests = ["pytest (>=4.6)"] [[package]] name = "mypy" -version = "1.11.0" +version = "1.11.1" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3824187c99b893f90c845bab405a585d1ced4ff55421fdf5c84cb7710995229"}, - {file = "mypy-1.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:96f8dbc2c85046c81bcddc246232d500ad729cb720da4e20fce3b542cab91287"}, - {file = "mypy-1.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1a5d8d8dd8613a3e2be3eae829ee891b6b2de6302f24766ff06cb2875f5be9c6"}, - {file = "mypy-1.11.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:72596a79bbfb195fd41405cffa18210af3811beb91ff946dbcb7368240eed6be"}, - {file = "mypy-1.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:35ce88b8ed3a759634cb4eb646d002c4cef0a38f20565ee82b5023558eb90c00"}, - {file = "mypy-1.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:98790025861cb2c3db8c2f5ad10fc8c336ed2a55f4daf1b8b3f877826b6ff2eb"}, - {file = "mypy-1.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:25bcfa75b9b5a5f8d67147a54ea97ed63a653995a82798221cca2a315c0238c1"}, - {file = "mypy-1.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bea2a0e71c2a375c9fa0ede3d98324214d67b3cbbfcbd55ac8f750f85a414e3"}, - {file = "mypy-1.11.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d2b3d36baac48e40e3064d2901f2fbd2a2d6880ec6ce6358825c85031d7c0d4d"}, - {file = "mypy-1.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:d8e2e43977f0e09f149ea69fd0556623919f816764e26d74da0c8a7b48f3e18a"}, - {file = "mypy-1.11.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1d44c1e44a8be986b54b09f15f2c1a66368eb43861b4e82573026e04c48a9e20"}, - {file = "mypy-1.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cea3d0fb69637944dd321f41bc896e11d0fb0b0aa531d887a6da70f6e7473aba"}, - {file = "mypy-1.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a83ec98ae12d51c252be61521aa5731f5512231d0b738b4cb2498344f0b840cd"}, - {file = 
"mypy-1.11.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c7b73a856522417beb78e0fb6d33ef89474e7a622db2653bc1285af36e2e3e3d"}, - {file = "mypy-1.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:f2268d9fcd9686b61ab64f077be7ffbc6fbcdfb4103e5dd0cc5eaab53a8886c2"}, - {file = "mypy-1.11.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:940bfff7283c267ae6522ef926a7887305945f716a7704d3344d6d07f02df850"}, - {file = "mypy-1.11.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:14f9294528b5f5cf96c721f231c9f5b2733164e02c1c018ed1a0eff8a18005ac"}, - {file = "mypy-1.11.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d7b54c27783991399046837df5c7c9d325d921394757d09dbcbf96aee4649fe9"}, - {file = "mypy-1.11.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:65f190a6349dec29c8d1a1cd4aa71284177aee5949e0502e6379b42873eddbe7"}, - {file = "mypy-1.11.0-cp38-cp38-win_amd64.whl", hash = "sha256:dbe286303241fea8c2ea5466f6e0e6a046a135a7e7609167b07fd4e7baf151bf"}, - {file = "mypy-1.11.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:104e9c1620c2675420abd1f6c44bab7dd33cc85aea751c985006e83dcd001095"}, - {file = "mypy-1.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f006e955718ecd8d159cee9932b64fba8f86ee6f7728ca3ac66c3a54b0062abe"}, - {file = "mypy-1.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:becc9111ca572b04e7e77131bc708480cc88a911adf3d0239f974c034b78085c"}, - {file = "mypy-1.11.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6801319fe76c3f3a3833f2b5af7bd2c17bb93c00026a2a1b924e6762f5b19e13"}, - {file = "mypy-1.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:c1a184c64521dc549324ec6ef7cbaa6b351912be9cb5edb803c2808a0d7e85ac"}, - {file = "mypy-1.11.0-py3-none-any.whl", hash = "sha256:56913ec8c7638b0091ef4da6fcc9136896914a9d60d54670a75880c3e5b99ace"}, - {file = "mypy-1.11.0.tar.gz", hash = "sha256:93743608c7348772fdc717af4aeee1997293a1ad04bc0ea6efa15bf65385c538"}, + {file = "mypy-1.11.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a32fc80b63de4b5b3e65f4be82b4cfa362a46702672aa6a0f443b4689af7008c"}, + {file = "mypy-1.11.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c1952f5ea8a5a959b05ed5f16452fddadbaae48b5d39235ab4c3fc444d5fd411"}, + {file = "mypy-1.11.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1e30dc3bfa4e157e53c1d17a0dad20f89dc433393e7702b813c10e200843b03"}, + {file = "mypy-1.11.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2c63350af88f43a66d3dfeeeb8d77af34a4f07d760b9eb3a8697f0386c7590b4"}, + {file = "mypy-1.11.1-cp310-cp310-win_amd64.whl", hash = "sha256:a831671bad47186603872a3abc19634f3011d7f83b083762c942442d51c58d58"}, + {file = "mypy-1.11.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7b6343d338390bb946d449677726edf60102a1c96079b4f002dedff375953fc5"}, + {file = "mypy-1.11.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e4fe9f4e5e521b458d8feb52547f4bade7ef8c93238dfb5bbc790d9ff2d770ca"}, + {file = "mypy-1.11.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:886c9dbecc87b9516eff294541bf7f3655722bf22bb898ee06985cd7269898de"}, + {file = "mypy-1.11.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fca4a60e1dd9fd0193ae0067eaeeb962f2d79e0d9f0f66223a0682f26ffcc809"}, + {file = "mypy-1.11.1-cp311-cp311-win_amd64.whl", hash = "sha256:0bd53faf56de9643336aeea1c925012837432b5faf1701ccca7fde70166ccf72"}, + {file = 
"mypy-1.11.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f39918a50f74dc5969807dcfaecafa804fa7f90c9d60506835036cc1bc891dc8"}, + {file = "mypy-1.11.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0bc71d1fb27a428139dd78621953effe0d208aed9857cb08d002280b0422003a"}, + {file = "mypy-1.11.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b868d3bcff720dd7217c383474008ddabaf048fad8d78ed948bb4b624870a417"}, + {file = "mypy-1.11.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a707ec1527ffcdd1c784d0924bf5cb15cd7f22683b919668a04d2b9c34549d2e"}, + {file = "mypy-1.11.1-cp312-cp312-win_amd64.whl", hash = "sha256:64f4a90e3ea07f590c5bcf9029035cf0efeae5ba8be511a8caada1a4893f5525"}, + {file = "mypy-1.11.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:749fd3213916f1751fff995fccf20c6195cae941dc968f3aaadf9bb4e430e5a2"}, + {file = "mypy-1.11.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b639dce63a0b19085213ec5fdd8cffd1d81988f47a2dec7100e93564f3e8fb3b"}, + {file = "mypy-1.11.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c956b49c5d865394d62941b109728c5c596a415e9c5b2be663dd26a1ff07bc0"}, + {file = "mypy-1.11.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45df906e8b6804ef4b666af29a87ad9f5921aad091c79cc38e12198e220beabd"}, + {file = "mypy-1.11.1-cp38-cp38-win_amd64.whl", hash = "sha256:d44be7551689d9d47b7abc27c71257adfdb53f03880841a5db15ddb22dc63edb"}, + {file = "mypy-1.11.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2684d3f693073ab89d76da8e3921883019ea8a3ec20fa5d8ecca6a2db4c54bbe"}, + {file = "mypy-1.11.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:79c07eb282cb457473add5052b63925e5cc97dfab9812ee65a7c7ab5e3cb551c"}, + {file = "mypy-1.11.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11965c2f571ded6239977b14deebd3f4c3abd9a92398712d6da3a772974fad69"}, + {file = "mypy-1.11.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a2b43895a0f8154df6519706d9bca8280cda52d3d9d1514b2d9c3e26792a0b74"}, + {file = "mypy-1.11.1-cp39-cp39-win_amd64.whl", hash = "sha256:1a81cf05975fd61aec5ae16501a091cfb9f605dc3e3c878c0da32f250b74760b"}, + {file = "mypy-1.11.1-py3-none-any.whl", hash = "sha256:0624bdb940255d2dd24e829d99a13cfeb72e4e9031f9492148f410ed30bcab54"}, + {file = "mypy-1.11.1.tar.gz", hash = "sha256:f404a0b069709f18bbdb702eb3dcfe51910602995de00bd39cea3050b5772d08"}, ] [package.dependencies] @@ -1740,13 +1763,13 @@ test = ["pytest (>=7.2)", "pytest-cov (>=4.0)"] [[package]] name = "nltk" -version = "3.8.1" +version = "3.9.1" description = "Natural Language Toolkit" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "nltk-3.8.1-py3-none-any.whl", hash = "sha256:fd5c9109f976fa86bcadba8f91e47f5e9293bd034474752e92a520f81c93dda5"}, - {file = "nltk-3.8.1.zip", hash = "sha256:1834da3d0682cba4f2cede2f9aad6b0fafb6461ba451db0efb6f9c39798d64d3"}, + {file = "nltk-3.9.1-py3-none-any.whl", hash = "sha256:4fa26829c5b00715afe3061398a8989dc643b92ce7dd93fb4585a70930d168a1"}, + {file = "nltk-3.9.1.tar.gz", hash = "sha256:87d127bd3de4bd89a4f81265e5fa59cb1b199b27440175370f7417d2bc7ae868"}, ] [package.dependencies] @@ -1931,14 +1954,14 @@ files = [ [[package]] name = "nvidia-nvjitlink-cu12" -version = "12.5.82" +version = "12.6.20" description = "Nvidia JIT LTO Library" optional = false python-versions = ">=3" files = [ - {file = "nvidia_nvjitlink_cu12-12.5.82-py3-none-manylinux2014_aarch64.whl", hash = 
"sha256:98103729cc5226e13ca319a10bbf9433bbbd44ef64fe72f45f067cacc14b8d27"}, - {file = "nvidia_nvjitlink_cu12-12.5.82-py3-none-manylinux2014_x86_64.whl", hash = "sha256:f9b37bc5c8cf7509665cb6ada5aaa0ce65618f2332b7d3e78e9790511f111212"}, - {file = "nvidia_nvjitlink_cu12-12.5.82-py3-none-win_amd64.whl", hash = "sha256:e782564d705ff0bf61ac3e1bf730166da66dd2fe9012f111ede5fc49b64ae697"}, + {file = "nvidia_nvjitlink_cu12-12.6.20-py3-none-manylinux2014_aarch64.whl", hash = "sha256:84fb38465a5bc7c70cbc320cfd0963eb302ee25a5e939e9f512bbba55b6072fb"}, + {file = "nvidia_nvjitlink_cu12-12.6.20-py3-none-manylinux2014_x86_64.whl", hash = "sha256:562ab97ea2c23164823b2a89cb328d01d45cb99634b8c65fe7cd60d14562bd79"}, + {file = "nvidia_nvjitlink_cu12-12.6.20-py3-none-win_amd64.whl", hash = "sha256:ed3c43a17f37b0c922a919203d2d36cbef24d41cc3e6b625182f8b58203644f6"}, ] [[package]] @@ -1983,37 +2006,37 @@ PyYAML = ">=5.1.0" [[package]] name = "onnx" -version = "1.16.1" +version = "1.16.2" description = "Open Neural Network Exchange" optional = false python-versions = ">=3.8" files = [ - {file = "onnx-1.16.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:bb2d392e5b7060082c2fb38eb5c44f67eb34ff5f0681bd6f45beff9abc6f7094"}, - {file = "onnx-1.16.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15abf94a7868eed6db15a8b5024ba570c891cae77ca4d0e7258dabdad76980df"}, - {file = "onnx-1.16.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6251910e554f811fdd070164b0bc76d76b067b95576cb9dad4d52ae64fe014b5"}, - {file = "onnx-1.16.1-cp310-cp310-win32.whl", hash = "sha256:c11e3b15eee46cd20767e505cc3ba97457ef5ac93c3e459cdfb77943ff8fe9a7"}, - {file = "onnx-1.16.1-cp310-cp310-win_amd64.whl", hash = "sha256:b3d10405706807ec2ef493b2a78519fa0264cf190363e89478585aac1179b596"}, - {file = "onnx-1.16.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:006ba5059c85ce43e89a1486cc0276d0f1a8ec9c6efd1a9334fd3fa0f6e33b64"}, - {file = "onnx-1.16.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1521ea7cd3497ecaf57d3b5e72d637ca5ebca632122a0806a9df99bedbeecdf8"}, - {file = "onnx-1.16.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45cf20421aeac03872bea5fd6ebf92abe15c4d1461a2572eb839add5059e2a09"}, - {file = "onnx-1.16.1-cp311-cp311-win32.whl", hash = "sha256:f98e275b4f46a617a9c527e60c02531eae03cf67a04c26db8a1c20acee539533"}, - {file = "onnx-1.16.1-cp311-cp311-win_amd64.whl", hash = "sha256:95aa20aa65a9035d7543e81713e8b0f611e213fc02171959ef4ee09311d1bf28"}, - {file = "onnx-1.16.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:32e11d39bee04f927fab09f74c46cf76584094462311bab1aca9ccdae6ed3366"}, - {file = "onnx-1.16.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8884bf53b552873c0c9b072cb8625e7d4e8f3cc0529191632d24e3de58a3b93a"}, - {file = "onnx-1.16.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:595b2830093f81361961295f7b0ebb6000423bcd04123d516d081c306002e387"}, - {file = "onnx-1.16.1-cp312-cp312-win32.whl", hash = "sha256:2fde4dd5bc278b3fc8148f460bce8807b2874c66f48529df9444cdbc9ecf456b"}, - {file = "onnx-1.16.1-cp312-cp312-win_amd64.whl", hash = "sha256:e69ad8c110d8c37d759cad019d498fdf3fd24e0bfaeb960e52fed0469a5d2974"}, - {file = "onnx-1.16.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:0fc189195a40b5862fb77d97410c89823197fe19c1088ce150444eec72f200c1"}, - {file = "onnx-1.16.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:496ba17b16a74711081772e1b03f3207959972e351298e51abdc600051027a22"}, - {file = "onnx-1.16.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f3faf239b48418b3ea6fe73bd4d86807b903d0b2ebd20b8b8c84f83741b0f18"}, - {file = "onnx-1.16.1-cp38-cp38-win32.whl", hash = "sha256:18b22143836838591f6551b089196e69f60c47fabce52b4b72b4cb37522645aa"}, - {file = "onnx-1.16.1-cp38-cp38-win_amd64.whl", hash = "sha256:8c2b70d602acfb90056fbdc60ef26f4658f964591212a4e9dbbda922ff43061b"}, - {file = "onnx-1.16.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:2bed6fe05905b073206cabbb4463c58050cf8d544192303c09927b229f93ac14"}, - {file = "onnx-1.16.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5798414332534a41404a7ff83677d49ced01d70160e1541484cce647f2295051"}, - {file = "onnx-1.16.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa7518d6d27f357261a4014079dec364cad6fef827d0b3fe1d3ff59939a68394"}, - {file = "onnx-1.16.1-cp39-cp39-win32.whl", hash = "sha256:67f372db4fe8fe61e00b762af5b0833aa72b5baa37e7e2f47d8668964ebff411"}, - {file = "onnx-1.16.1-cp39-cp39-win_amd64.whl", hash = "sha256:1c059fea6229c44d2d39c8f6e2f2f0d676d587c97f4c854c86f3e7bc97e0b31c"}, - {file = "onnx-1.16.1.tar.gz", hash = "sha256:8299193f0f2a3849bfc069641aa8e4f93696602da8d165632af8ee48ec7556b6"}, + {file = "onnx-1.16.2-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:ab0a1aa6b0470020ea3636afdce3e2a67f856fefe4be8c73b20371b07fcde69c"}, + {file = "onnx-1.16.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a449122a49534bb9c2b6f16c8493b606ef0accda6b9dbf0c513ca4b31ebe8b38"}, + {file = "onnx-1.16.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec6a425e59291fff430da4a884aa07a1d0cbb5dcd22cc78f6cf4ba5adb9f3367"}, + {file = "onnx-1.16.2-cp310-cp310-win32.whl", hash = "sha256:55fbaf38acd4cd8fdd0b4f36871fb596b075518d3e981acc893f2ab887d1891a"}, + {file = "onnx-1.16.2-cp310-cp310-win_amd64.whl", hash = "sha256:4e496d301756e0a22fd2bdfac24b861c7b1ddbdd9ce7677b2a252c00c4c8f2a7"}, + {file = "onnx-1.16.2-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:859b41574243c9bfd0abce03c15c78a1f270cc03c7f99629b984daf7adfa5003"}, + {file = "onnx-1.16.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39a57d196fe5d73861e70d9625674e6caf8ca13c5e9c740462cf530a07cd2e1c"}, + {file = "onnx-1.16.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b98aa9733bd4b781eb931d33b4078ff2837e7d68062460726d6dd011f332bd4"}, + {file = "onnx-1.16.2-cp311-cp311-win32.whl", hash = "sha256:e9f018b2e172efeea8c2473a51a825652767726374145d7cfdebdc7a27446fdd"}, + {file = "onnx-1.16.2-cp311-cp311-win_amd64.whl", hash = "sha256:e66e4512a30df8916db5cf84f47d47b3250b9ab9a98d9cffe142c98c54598ba0"}, + {file = "onnx-1.16.2-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:bfdb8c2eb4c92f55626376e00993db8fcc753da4b80babf28d99636af8dbae6b"}, + {file = "onnx-1.16.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b77a6c138f284dfc9b06fa370768aa4fd167efc49ff740e2158dd02eedde8d0"}, + {file = "onnx-1.16.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca12e47965e590b63f31681c8c563c75449a04178f27eac1ff64bad314314fb3"}, + {file = "onnx-1.16.2-cp312-cp312-win32.whl", hash = "sha256:324fe3551e91ffd74b43dbcf1d48e96579f4c1be2ff1224591ecd3ec6daa6139"}, + {file = "onnx-1.16.2-cp312-cp312-win_amd64.whl", hash = 
"sha256:080b19b0bd2b5536b4c61812464fe495758d6c9cfed3fdd3f20516e616212bee"}, + {file = "onnx-1.16.2-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:c42a5db2db36fc46d3a93ab6aeff0f11abe10a4a16a85f2aad8879a58a898ee5"}, + {file = "onnx-1.16.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9635437ffe51cc71343f3067bc548a068bd287ac690f65a9f6223ea9dca441bf"}, + {file = "onnx-1.16.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9e22be82c3447ba6d2fe851973a736a7013e97b398e8beb7a25fd2ad4df219e"}, + {file = "onnx-1.16.2-cp38-cp38-win32.whl", hash = "sha256:e16012431643c66124eba0089acdad0df71d5c9d4e6bec4721999f9eecab72b7"}, + {file = "onnx-1.16.2-cp38-cp38-win_amd64.whl", hash = "sha256:42231a467e5be2974d426b410987073ed85bee34af7b50c93ab221a8696b0cfd"}, + {file = "onnx-1.16.2-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:e79edba750ae06059d82d8ff8129a6488a7e692cd23cd7fe010f7ec7d6a14bad"}, + {file = "onnx-1.16.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d192db8501103fede9c1725861e65ed41efb65da1ce915ba969aae40073eb94"}, + {file = "onnx-1.16.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da01d4a3bd7a0d0ee5084f65441fc9ca38450fc18835b7f9d5da5b9e7ca8b85d"}, + {file = "onnx-1.16.2-cp39-cp39-win32.whl", hash = "sha256:0b765b09bdb01fa2338ea52483aa3d9c75e249f85446f0d9ad1dc5bd2b149082"}, + {file = "onnx-1.16.2-cp39-cp39-win_amd64.whl", hash = "sha256:bfee781a59919e797f4dae380e63a0390ec01ce5c337a1459b992aac2f49a3c2"}, + {file = "onnx-1.16.2.tar.gz", hash = "sha256:b33a282b038813c4b69e73ea65c2909768e8dd6cc10619b70632335daf094646"}, ] [package.dependencies] @@ -2021,46 +2044,46 @@ numpy = ">=1.20" protobuf = ">=3.20.2" [package.extras] -reference = ["Pillow", "google-re2"] +reference = ["google-re2", "pillow"] [[package]] name = "onnxruntime" -version = "1.18.1" +version = "1.19.0" description = "ONNX Runtime is a runtime accelerator for Machine Learning models" optional = false python-versions = "*" files = [ - {file = "onnxruntime-1.18.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:29ef7683312393d4ba04252f1b287d964bd67d5e6048b94d2da3643986c74d80"}, - {file = "onnxruntime-1.18.1-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fc706eb1df06ddf55776e15a30519fb15dda7697f987a2bbda4962845e3cec05"}, - {file = "onnxruntime-1.18.1-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7de69f5ced2a263531923fa68bbec52a56e793b802fcd81a03487b5e292bc3a"}, - {file = "onnxruntime-1.18.1-cp310-cp310-win32.whl", hash = "sha256:221e5b16173926e6c7de2cd437764492aa12b6811f45abd37024e7cf2ae5d7e3"}, - {file = "onnxruntime-1.18.1-cp310-cp310-win_amd64.whl", hash = "sha256:75211b619275199c861ee94d317243b8a0fcde6032e5a80e1aa9ded8ab4c6060"}, - {file = "onnxruntime-1.18.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:f26582882f2dc581b809cfa41a125ba71ad9e715738ec6402418df356969774a"}, - {file = "onnxruntime-1.18.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ef36f3a8b768506d02be349ac303fd95d92813ba3ba70304d40c3cd5c25d6a4c"}, - {file = "onnxruntime-1.18.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:170e711393e0618efa8ed27b59b9de0ee2383bd2a1f93622a97006a5ad48e434"}, - {file = "onnxruntime-1.18.1-cp311-cp311-win32.whl", hash = "sha256:9b6a33419b6949ea34e0dc009bc4470e550155b6da644571ecace4b198b0d88f"}, - {file = "onnxruntime-1.18.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:5c1380a9f1b7788da742c759b6a02ba771fe1ce620519b2b07309decbd1a2fe1"}, - {file = "onnxruntime-1.18.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:31bd57a55e3f983b598675dfc7e5d6f0877b70ec9864b3cc3c3e1923d0a01919"}, - {file = "onnxruntime-1.18.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b9e03c4ba9f734500691a4d7d5b381cd71ee2f3ce80a1154ac8f7aed99d1ecaa"}, - {file = "onnxruntime-1.18.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:781aa9873640f5df24524f96f6070b8c550c66cb6af35710fd9f92a20b4bfbf6"}, - {file = "onnxruntime-1.18.1-cp312-cp312-win32.whl", hash = "sha256:3a2d9ab6254ca62adbb448222e630dc6883210f718065063518c8f93a32432be"}, - {file = "onnxruntime-1.18.1-cp312-cp312-win_amd64.whl", hash = "sha256:ad93c560b1c38c27c0275ffd15cd7f45b3ad3fc96653c09ce2931179982ff204"}, - {file = "onnxruntime-1.18.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:3b55dc9d3c67626388958a3eb7ad87eb7c70f75cb0f7ff4908d27b8b42f2475c"}, - {file = "onnxruntime-1.18.1-cp38-cp38-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f80dbcfb6763cc0177a31168b29b4bd7662545b99a19e211de8c734b657e0669"}, - {file = "onnxruntime-1.18.1-cp38-cp38-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f1ff2c61a16d6c8631796c54139bafea41ee7736077a0fc64ee8ae59432f5c58"}, - {file = "onnxruntime-1.18.1-cp38-cp38-win32.whl", hash = "sha256:219855bd272fe0c667b850bf1a1a5a02499269a70d59c48e6f27f9c8bcb25d02"}, - {file = "onnxruntime-1.18.1-cp38-cp38-win_amd64.whl", hash = "sha256:afdf16aa607eb9a2c60d5ca2d5abf9f448e90c345b6b94c3ed14f4fb7e6a2d07"}, - {file = "onnxruntime-1.18.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:128df253ade673e60cea0955ec9d0e89617443a6d9ce47c2d79eb3f72a3be3de"}, - {file = "onnxruntime-1.18.1-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9839491e77e5c5a175cab3621e184d5a88925ee297ff4c311b68897197f4cde9"}, - {file = "onnxruntime-1.18.1-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ad3187c1faff3ac15f7f0e7373ef4788c582cafa655a80fdbb33eaec88976c66"}, - {file = "onnxruntime-1.18.1-cp39-cp39-win32.whl", hash = "sha256:34657c78aa4e0b5145f9188b550ded3af626651b15017bf43d280d7e23dbf195"}, - {file = "onnxruntime-1.18.1-cp39-cp39-win_amd64.whl", hash = "sha256:9c14fd97c3ddfa97da5feef595e2c73f14c2d0ec1d4ecbea99c8d96603c89589"}, + {file = "onnxruntime-1.19.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:6ce22a98dfec7b646ae305f52d0ce14a189a758b02ea501860ca719f4b0ae04b"}, + {file = "onnxruntime-1.19.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:19019c72873f26927aa322c54cf2bf7312b23451b27451f39b88f57016c94f8b"}, + {file = "onnxruntime-1.19.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8eaa16df99171dc636e30108d15597aed8c4c2dd9dbfdd07cc464d57d73fb275"}, + {file = "onnxruntime-1.19.0-cp310-cp310-win32.whl", hash = "sha256:0eb0f8dbe596fd0f4737fe511fdbb17603853a7d204c5b2ca38d3c7808fc556b"}, + {file = "onnxruntime-1.19.0-cp310-cp310-win_amd64.whl", hash = "sha256:616092d54ba8023b7bc0a5f6d900a07a37cc1cfcc631873c15f8c1d6e9e184d4"}, + {file = "onnxruntime-1.19.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:a2b53b3c287cd933e5eb597273926e899082d8c84ab96e1b34035764a1627e17"}, + {file = "onnxruntime-1.19.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e94984663963e74fbb468bde9ec6f19dcf890b594b35e249c4dc8789d08993c5"}, + {file = 
"onnxruntime-1.19.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6f379d1f050cfb55ce015d53727b78ee362febc065c38eed81512b22b757da73"}, + {file = "onnxruntime-1.19.0-cp311-cp311-win32.whl", hash = "sha256:4ccb48faea02503275ae7e79e351434fc43c294c4cb5c4d8bcb7479061396614"}, + {file = "onnxruntime-1.19.0-cp311-cp311-win_amd64.whl", hash = "sha256:9cdc8d311289a84e77722de68bd22b8adfb94eea26f4be6f9e017350faac8b18"}, + {file = "onnxruntime-1.19.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:1b59eaec1be9a8613c5fdeaafe67f73a062edce3ac03bbbdc9e2d98b58a30617"}, + {file = "onnxruntime-1.19.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be4144d014a4b25184e63ce7a463a2e7796e2f3df931fccc6a6aefa6f1365dc5"}, + {file = "onnxruntime-1.19.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:10d7e7d4ca7021ce7f29a66dbc6071addf2de5839135339bd855c6d9c2bba371"}, + {file = "onnxruntime-1.19.0-cp312-cp312-win32.whl", hash = "sha256:87f2c58b577a1fb31dc5d92b647ecc588fd5f1ea0c3ad4526f5f80a113357c8d"}, + {file = "onnxruntime-1.19.0-cp312-cp312-win_amd64.whl", hash = "sha256:8a1f50d49676d7b69566536ff039d9e4e95fc482a55673719f46528218ecbb94"}, + {file = "onnxruntime-1.19.0-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:71423c8c4b2d7a58956271534302ec72721c62a41efd0c4896343249b8399ab0"}, + {file = "onnxruntime-1.19.0-cp38-cp38-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9d63630d45e9498f96e75bbeb7fd4a56acb10155de0de4d0e18d1b6cbb0b358a"}, + {file = "onnxruntime-1.19.0-cp38-cp38-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f3bfd15db1e8794d379a86c1a9116889f47f2cca40cc82208fc4f7e8c38e8522"}, + {file = "onnxruntime-1.19.0-cp38-cp38-win32.whl", hash = "sha256:3b098003b6b4cb37cc84942e5f1fe27f945dd857cbd2829c824c26b0ba4a247e"}, + {file = "onnxruntime-1.19.0-cp38-cp38-win_amd64.whl", hash = "sha256:cea067a6541d6787d903ee6843401c5b1332a266585160d9700f9f0939443886"}, + {file = "onnxruntime-1.19.0-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:c4fcff12dc5ca963c5f76b9822bb404578fa4a98c281e8c666b429192799a099"}, + {file = "onnxruntime-1.19.0-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f6dcad8a4db908fbe70b98c79cea1c8b6ac3316adf4ce93453136e33a524ac59"}, + {file = "onnxruntime-1.19.0-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4bc449907c6e8d99eee5ae5cc9c8fdef273d801dcd195393d3f9ab8ad3f49522"}, + {file = "onnxruntime-1.19.0-cp39-cp39-win32.whl", hash = "sha256:947febd48405afcf526e45ccff97ff23b15e530434705f734870d22ae7fcf236"}, + {file = "onnxruntime-1.19.0-cp39-cp39-win_amd64.whl", hash = "sha256:f60be47eff5ee77fd28a466b0fd41d7debc42a32179d1ddb21e05d6067d7b48b"}, ] [package.dependencies] coloredlogs = "*" flatbuffers = "*" -numpy = ">=1.21.6,<2.0" +numpy = ">=1.21.6" packaging = "*" protobuf = "*" sympy = "*" @@ -2121,62 +2144,68 @@ dev = ["black", "mypy", "pytest"] [[package]] name = "orjson" -version = "3.10.6" +version = "3.10.7" description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" optional = false python-versions = ">=3.8" files = [ - {file = "orjson-3.10.6-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:fb0ee33124db6eaa517d00890fc1a55c3bfe1cf78ba4a8899d71a06f2d6ff5c7"}, - {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:9c1c4b53b24a4c06547ce43e5fee6ec4e0d8fe2d597f4647fc033fd205707365"}, - {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eadc8fd310edb4bdbd333374f2c8fec6794bbbae99b592f448d8214a5e4050c0"}, - {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:61272a5aec2b2661f4fa2b37c907ce9701e821b2c1285d5c3ab0207ebd358d38"}, - {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57985ee7e91d6214c837936dc1608f40f330a6b88bb13f5a57ce5257807da143"}, - {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:633a3b31d9d7c9f02d49c4ab4d0a86065c4a6f6adc297d63d272e043472acab5"}, - {file = "orjson-3.10.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:1c680b269d33ec444afe2bdc647c9eb73166fa47a16d9a75ee56a374f4a45f43"}, - {file = "orjson-3.10.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f759503a97a6ace19e55461395ab0d618b5a117e8d0fbb20e70cfd68a47327f2"}, - {file = "orjson-3.10.6-cp310-none-win32.whl", hash = "sha256:95a0cce17f969fb5391762e5719575217bd10ac5a189d1979442ee54456393f3"}, - {file = "orjson-3.10.6-cp310-none-win_amd64.whl", hash = "sha256:df25d9271270ba2133cc88ee83c318372bdc0f2cd6f32e7a450809a111efc45c"}, - {file = "orjson-3.10.6-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:b1ec490e10d2a77c345def52599311849fc063ae0e67cf4f84528073152bb2ba"}, - {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55d43d3feb8f19d07e9f01e5b9be4f28801cf7c60d0fa0d279951b18fae1932b"}, - {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac3045267e98fe749408eee1593a142e02357c5c99be0802185ef2170086a863"}, - {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c27bc6a28ae95923350ab382c57113abd38f3928af3c80be6f2ba7eb8d8db0b0"}, - {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d27456491ca79532d11e507cadca37fb8c9324a3976294f68fb1eff2dc6ced5a"}, - {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05ac3d3916023745aa3b3b388e91b9166be1ca02b7c7e41045da6d12985685f0"}, - {file = "orjson-3.10.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1335d4ef59ab85cab66fe73fd7a4e881c298ee7f63ede918b7faa1b27cbe5212"}, - {file = "orjson-3.10.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4bbc6d0af24c1575edc79994c20e1b29e6fb3c6a570371306db0993ecf144dc5"}, - {file = "orjson-3.10.6-cp311-none-win32.whl", hash = "sha256:450e39ab1f7694465060a0550b3f6d328d20297bf2e06aa947b97c21e5241fbd"}, - {file = "orjson-3.10.6-cp311-none-win_amd64.whl", hash = "sha256:227df19441372610b20e05bdb906e1742ec2ad7a66ac8350dcfd29a63014a83b"}, - {file = "orjson-3.10.6-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:ea2977b21f8d5d9b758bb3f344a75e55ca78e3ff85595d248eee813ae23ecdfb"}, - {file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b6f3d167d13a16ed263b52dbfedff52c962bfd3d270b46b7518365bcc2121eed"}, - {file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f710f346e4c44a4e8bdf23daa974faede58f83334289df80bc9cd12fe82573c7"}, - {file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:7275664f84e027dcb1ad5200b8b18373e9c669b2a9ec33d410c40f5ccf4b257e"}, - {file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0943e4c701196b23c240b3d10ed8ecd674f03089198cf503105b474a4f77f21f"}, - {file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:446dee5a491b5bc7d8f825d80d9637e7af43f86a331207b9c9610e2f93fee22a"}, - {file = "orjson-3.10.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:64c81456d2a050d380786413786b057983892db105516639cb5d3ee3c7fd5148"}, - {file = "orjson-3.10.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:960db0e31c4e52fa0fc3ecbaea5b2d3b58f379e32a95ae6b0ebeaa25b93dfd34"}, - {file = "orjson-3.10.6-cp312-none-win32.whl", hash = "sha256:a6ea7afb5b30b2317e0bee03c8d34c8181bc5a36f2afd4d0952f378972c4efd5"}, - {file = "orjson-3.10.6-cp312-none-win_amd64.whl", hash = "sha256:874ce88264b7e655dde4aeaacdc8fd772a7962faadfb41abe63e2a4861abc3dc"}, - {file = "orjson-3.10.6-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:66680eae4c4e7fc193d91cfc1353ad6d01b4801ae9b5314f17e11ba55e934183"}, - {file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:caff75b425db5ef8e8f23af93c80f072f97b4fb3afd4af44482905c9f588da28"}, - {file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3722fddb821b6036fd2a3c814f6bd9b57a89dc6337b9924ecd614ebce3271394"}, - {file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2c116072a8533f2fec435fde4d134610f806bdac20188c7bd2081f3e9e0133f"}, - {file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6eeb13218c8cf34c61912e9df2de2853f1d009de0e46ea09ccdf3d757896af0a"}, - {file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:965a916373382674e323c957d560b953d81d7a8603fbeee26f7b8248638bd48b"}, - {file = "orjson-3.10.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:03c95484d53ed8e479cade8628c9cea00fd9d67f5554764a1110e0d5aa2de96e"}, - {file = "orjson-3.10.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:e060748a04cccf1e0a6f2358dffea9c080b849a4a68c28b1b907f272b5127e9b"}, - {file = "orjson-3.10.6-cp38-none-win32.whl", hash = "sha256:738dbe3ef909c4b019d69afc19caf6b5ed0e2f1c786b5d6215fbb7539246e4c6"}, - {file = "orjson-3.10.6-cp38-none-win_amd64.whl", hash = "sha256:d40f839dddf6a7d77114fe6b8a70218556408c71d4d6e29413bb5f150a692ff7"}, - {file = "orjson-3.10.6-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:697a35a083c4f834807a6232b3e62c8b280f7a44ad0b759fd4dce748951e70db"}, - {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd502f96bf5ea9a61cbc0b2b5900d0dd68aa0da197179042bdd2be67e51a1e4b"}, - {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f215789fb1667cdc874c1b8af6a84dc939fd802bf293a8334fce185c79cd359b"}, - {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2debd8ddce948a8c0938c8c93ade191d2f4ba4649a54302a7da905a81f00b56"}, - {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5410111d7b6681d4b0d65e0f58a13be588d01b473822483f77f513c7f93bd3b2"}, - {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:bb1f28a137337fdc18384079fa5726810681055b32b92253fa15ae5656e1dddb"}, - {file = "orjson-3.10.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:bf2fbbce5fe7cd1aa177ea3eab2b8e6a6bc6e8592e4279ed3db2d62e57c0e1b2"}, - {file = "orjson-3.10.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:79b9b9e33bd4c517445a62b90ca0cc279b0f1f3970655c3df9e608bc3f91741a"}, - {file = "orjson-3.10.6-cp39-none-win32.whl", hash = "sha256:30b0a09a2014e621b1adf66a4f705f0809358350a757508ee80209b2d8dae219"}, - {file = "orjson-3.10.6-cp39-none-win_amd64.whl", hash = "sha256:49e3bc615652617d463069f91b867a4458114c5b104e13b7ae6872e5f79d0844"}, - {file = "orjson-3.10.6.tar.gz", hash = "sha256:e54b63d0a7c6c54a5f5f726bc93a2078111ef060fec4ecbf34c5db800ca3b3a7"}, + {file = "orjson-3.10.7-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:74f4544f5a6405b90da8ea724d15ac9c36da4d72a738c64685003337401f5c12"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34a566f22c28222b08875b18b0dfbf8a947e69df21a9ed5c51a6bf91cfb944ac"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bf6ba8ebc8ef5792e2337fb0419f8009729335bb400ece005606336b7fd7bab7"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac7cf6222b29fbda9e3a472b41e6a5538b48f2c8f99261eecd60aafbdb60690c"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de817e2f5fc75a9e7dd350c4b0f54617b280e26d1631811a43e7e968fa71e3e9"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:348bdd16b32556cf8d7257b17cf2bdb7ab7976af4af41ebe79f9796c218f7e91"}, + {file = "orjson-3.10.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:479fd0844ddc3ca77e0fd99644c7fe2de8e8be1efcd57705b5c92e5186e8a250"}, + {file = "orjson-3.10.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fdf5197a21dd660cf19dfd2a3ce79574588f8f5e2dbf21bda9ee2d2b46924d84"}, + {file = "orjson-3.10.7-cp310-none-win32.whl", hash = "sha256:d374d36726746c81a49f3ff8daa2898dccab6596864ebe43d50733275c629175"}, + {file = "orjson-3.10.7-cp310-none-win_amd64.whl", hash = "sha256:cb61938aec8b0ffb6eef484d480188a1777e67b05d58e41b435c74b9d84e0b9c"}, + {file = "orjson-3.10.7-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:7db8539039698ddfb9a524b4dd19508256107568cdad24f3682d5773e60504a2"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:480f455222cb7a1dea35c57a67578848537d2602b46c464472c995297117fa09"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8a9c9b168b3a19e37fe2778c0003359f07822c90fdff8f98d9d2a91b3144d8e0"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8de062de550f63185e4c1c54151bdddfc5625e37daf0aa1e75d2a1293e3b7d9a"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6b0dd04483499d1de9c8f6203f8975caf17a6000b9c0c54630cef02e44ee624e"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b58d3795dafa334fc8fd46f7c5dc013e6ad06fd5b9a4cc98cb1456e7d3558bd6"}, + {file = "orjson-3.10.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:33cfb96c24034a878d83d1a9415799a73dc77480e6c40417e5dda0710d559ee6"}, + {file = 
"orjson-3.10.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e724cebe1fadc2b23c6f7415bad5ee6239e00a69f30ee423f319c6af70e2a5c0"}, + {file = "orjson-3.10.7-cp311-none-win32.whl", hash = "sha256:82763b46053727a7168d29c772ed5c870fdae2f61aa8a25994c7984a19b1021f"}, + {file = "orjson-3.10.7-cp311-none-win_amd64.whl", hash = "sha256:eb8d384a24778abf29afb8e41d68fdd9a156cf6e5390c04cc07bbc24b89e98b5"}, + {file = "orjson-3.10.7-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:44a96f2d4c3af51bfac6bc4ef7b182aa33f2f054fd7f34cc0ee9a320d051d41f"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76ac14cd57df0572453543f8f2575e2d01ae9e790c21f57627803f5e79b0d3c3"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bdbb61dcc365dd9be94e8f7df91975edc9364d6a78c8f7adb69c1cdff318ec93"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b48b3db6bb6e0a08fa8c83b47bc169623f801e5cc4f24442ab2b6617da3b5313"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:23820a1563a1d386414fef15c249040042b8e5d07b40ab3fe3efbfbbcbcb8864"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0c6a008e91d10a2564edbb6ee5069a9e66df3fbe11c9a005cb411f441fd2c09"}, + {file = "orjson-3.10.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d352ee8ac1926d6193f602cbe36b1643bbd1bbcb25e3c1a657a4390f3000c9a5"}, + {file = "orjson-3.10.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d2d9f990623f15c0ae7ac608103c33dfe1486d2ed974ac3f40b693bad1a22a7b"}, + {file = "orjson-3.10.7-cp312-none-win32.whl", hash = "sha256:7c4c17f8157bd520cdb7195f75ddbd31671997cbe10aee559c2d613592e7d7eb"}, + {file = "orjson-3.10.7-cp312-none-win_amd64.whl", hash = "sha256:1d9c0e733e02ada3ed6098a10a8ee0052dd55774de3d9110d29868d24b17faa1"}, + {file = "orjson-3.10.7-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:77d325ed866876c0fa6492598ec01fe30e803272a6e8b10e992288b009cbe149"}, + {file = "orjson-3.10.7-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ea2c232deedcb605e853ae1db2cc94f7390ac776743b699b50b071b02bea6fe"}, + {file = "orjson-3.10.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3dcfbede6737fdbef3ce9c37af3fb6142e8e1ebc10336daa05872bfb1d87839c"}, + {file = "orjson-3.10.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:11748c135f281203f4ee695b7f80bb1358a82a63905f9f0b794769483ea854ad"}, + {file = "orjson-3.10.7-cp313-none-win32.whl", hash = "sha256:a7e19150d215c7a13f39eb787d84db274298d3f83d85463e61d277bbd7f401d2"}, + {file = "orjson-3.10.7-cp313-none-win_amd64.whl", hash = "sha256:eef44224729e9525d5261cc8d28d6b11cafc90e6bd0be2157bde69a52ec83024"}, + {file = "orjson-3.10.7-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:6ea2b2258eff652c82652d5e0f02bd5e0463a6a52abb78e49ac288827aaa1469"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:430ee4d85841e1483d487e7b81401785a5dfd69db5de01314538f31f8fbf7ee1"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4b6146e439af4c2472c56f8540d799a67a81226e11992008cb47e1267a9b3225"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:084e537806b458911137f76097e53ce7bf5806dda33ddf6aaa66a028f8d43a23"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4829cf2195838e3f93b70fd3b4292156fc5e097aac3739859ac0dcc722b27ac0"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1193b2416cbad1a769f868b1749535d5da47626ac29445803dae7cc64b3f5c98"}, + {file = "orjson-3.10.7-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:4e6c3da13e5a57e4b3dca2de059f243ebec705857522f188f0180ae88badd354"}, + {file = "orjson-3.10.7-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c31008598424dfbe52ce8c5b47e0752dca918a4fdc4a2a32004efd9fab41d866"}, + {file = "orjson-3.10.7-cp38-none-win32.whl", hash = "sha256:7122a99831f9e7fe977dc45784d3b2edc821c172d545e6420c375e5a935f5a1c"}, + {file = "orjson-3.10.7-cp38-none-win_amd64.whl", hash = "sha256:a763bc0e58504cc803739e7df040685816145a6f3c8a589787084b54ebc9f16e"}, + {file = "orjson-3.10.7-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:e76be12658a6fa376fcd331b1ea4e58f5a06fd0220653450f0d415b8fd0fbe20"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed350d6978d28b92939bfeb1a0570c523f6170efc3f0a0ef1f1df287cd4f4960"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:144888c76f8520e39bfa121b31fd637e18d4cc2f115727865fdf9fa325b10412"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09b2d92fd95ad2402188cf51573acde57eb269eddabaa60f69ea0d733e789fe9"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5b24a579123fa884f3a3caadaed7b75eb5715ee2b17ab5c66ac97d29b18fe57f"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591bcfe7512353bd609875ab38050efe3d55e18934e2f18950c108334b4ff"}, + {file = "orjson-3.10.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f4db56635b58cd1a200b0a23744ff44206ee6aa428185e2b6c4a65b3197abdcd"}, + {file = "orjson-3.10.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0fa5886854673222618638c6df7718ea7fe2f3f2384c452c9ccedc70b4a510a5"}, + {file = "orjson-3.10.7-cp39-none-win32.whl", hash = "sha256:8272527d08450ab16eb405f47e0f4ef0e5ff5981c3d82afe0efd25dcbef2bcd2"}, + {file = "orjson-3.10.7-cp39-none-win_amd64.whl", hash = "sha256:974683d4618c0c7dbf4f69c95a979734bf183d0658611760017f6e70a145af58"}, + {file = "orjson-3.10.7.tar.gz", hash = "sha256:75ef0640403f945f3a1f9f6400686560dbfb0fb5b16589ad62cd477043c4eee3"}, ] [[package]] @@ -2299,13 +2328,13 @@ image = ["Pillow"] [[package]] name = "pdfplumber" -version = "0.11.2" +version = "0.11.4" description = "Plumb a PDF for detailed information about each char, rectangle, and line." 
optional = false python-versions = ">=3.8" files = [ - {file = "pdfplumber-0.11.2-py3-none-any.whl", hash = "sha256:024a7e0f8f4e7bbec8e1f6f694faeaa7b9fe33a0c1f9edd9d3f77298d9146b87"}, - {file = "pdfplumber-0.11.2.tar.gz", hash = "sha256:f237ce88e9918358f3848f4bae469358ca121ca412098e370908878ec9da699a"}, + {file = "pdfplumber-0.11.4-py3-none-any.whl", hash = "sha256:6150f0678c7aaba974ac09839c17475d6c0c4d126b5f92cb85154885f31c6d73"}, + {file = "pdfplumber-0.11.4.tar.gz", hash = "sha256:147b55cde2351fcb9523b46b09cc771eea3602faecfb60d463c6bf951694fbe8"}, ] [package.dependencies] @@ -2315,53 +2344,56 @@ pypdfium2 = ">=4.18.0" [[package]] name = "pikepdf" -version = "9.1.0" +version = "9.1.2" description = "Read and write PDFs with Python, powered by qpdf" optional = false python-versions = ">=3.8" files = [ - {file = "pikepdf-9.1.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:836d98f80ca2e387950c3fbefd29ab8be7a2ad30551faa6dca1f0fdb480d8101"}, - {file = "pikepdf-9.1.0-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:582274ef5db2ff90b0113b31c8411f278d00b0ed92c777deb349d75353f33923"}, - {file = "pikepdf-9.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e5275825aaddebacbb064c361038f1b4ad68a90c6d07603e3bf217ef7b54355"}, - {file = "pikepdf-9.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972c01df05cf68c2b1f1d233a6dfb0c3cc67af133c39c0fe9b549474cfc2825"}, - {file = "pikepdf-9.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:39fd881dfd02cb73f9a2ee5306ca10679acf92f682eea97b24f8a80431671659"}, - {file = "pikepdf-9.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d0e7f2e57f70847b6934abe4bd262e1ea06376859af86cc21e8a198026a389ed"}, - {file = "pikepdf-9.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:13db3edd60ee2f0025e934ad73c36152ec0ac04eff5622a7dc09786cf6501683"}, - {file = "pikepdf-9.1.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:4ef95083f0817bb0c0516fffb074bd5dfcef0c416fa650f45670e6121a49871f"}, - {file = "pikepdf-9.1.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:6fe02620ae88e06def3bcc65fb414901200ca8e64da03d56751ad2df82520ae1"}, - {file = "pikepdf-9.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:352b962bc0c9c9cafc700c7e513badcc389aececfe389c199864b75c03027fbd"}, - {file = "pikepdf-9.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:008bcab5ab1ddaa6b544a269455da447f3e4778c65cb139cbbef5e468d2bc7b7"}, - {file = "pikepdf-9.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1bd508e32e19187adc37a5706ac7d3bfdc845882181ce998fa38b8cede868c64"}, - {file = "pikepdf-9.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03962d9f5a2696f4c86d2a9a27daa16484df7bf9ca10d7e3fd37633019cd9811"}, - {file = "pikepdf-9.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:29cad0883927b4056e8ef8f36fbf08f3c37ff5f34815e1b08c7d69ecfa68fcf1"}, - {file = "pikepdf-9.1.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:414af92d1ab00292cf9adc8f3ebb186bef036e8344c95498c3b5aa9f55ac90ce"}, - {file = "pikepdf-9.1.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:c28d803ce001c0f798c4d348a018be3aa9010c0c980a6e3b3592fd99922086ef"}, - {file = "pikepdf-9.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b156afeafb61bd2aab9f7876fbecabe02b4b9b33cc33a38a1495a97c1fdd8ae"}, - {file = "pikepdf-9.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ade4937e8bd5cf1545480fe58a66aa36612ce5f46f3c245d9566a8cc5ba1d908"}, - {file = "pikepdf-9.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:535ccec10d1067ca2751e01e4d893a4d3f7e8f95b23c51d82978acd16610deb8"}, - {file = "pikepdf-9.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:974cd98fc76ba9b24c84d6ff6ff12cd68a52b453f3fd9905dd15fdef1345018a"}, - {file = "pikepdf-9.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:3a45e4e9f88997f80ff961bc468ef3b72ec04e842ec4e7b13ab51bdaf12d17f9"}, - {file = "pikepdf-9.1.0-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:9cc69aaba73a82ea1ffdc01212179b0725c066e47bf304be6afe0e3b6bff2203"}, - {file = "pikepdf-9.1.0-cp38-cp38-macosx_14_0_arm64.whl", hash = "sha256:91a32e1fa552dbb93ae3145c45624711b4f71af71c090835f57c80a500d4ee4a"}, - {file = "pikepdf-9.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4f89a79cbfed62fa0f3cb44474d2aacb4ea3936a5657bbdba38a1c6e5f7e009"}, - {file = "pikepdf-9.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:426902d0c876c0b4d23b94d50c62944d8edfb77bd45659c7aa4c0f1c1f4856ab"}, - {file = "pikepdf-9.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:d9dbbb9e2f0e3cd65a53c525c892396d77c56ddc5bb1b0f7a73c0fe7b21f63e2"}, - {file = "pikepdf-9.1.0-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:244850c64c86c4e363e308d2debc782b34ef5e40c8dc8ad457bb5904ac092109"}, - {file = "pikepdf-9.1.0-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:6368bd022ece5a105194b3c30099f50c578549b0544164b905768de110b8f5a5"}, - {file = "pikepdf-9.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de22d23dfd0ffd1b061b2c4beff81d84c42d5902002d5fc745c8fb432ff0093c"}, - {file = "pikepdf-9.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d63eb2375284381e32d7936a0c369b1204403c581c97dca74d364493dc8068b"}, - {file = "pikepdf-9.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2044e71efcfba37d3e3713ea34603a893f7136e2c1e1a46a973f19bb03c1b672"}, - {file = "pikepdf-9.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f1e4ae046ffdca7f169a7be194bcadbed72b928598a89b3f1ba849f88ff116ea"}, - {file = "pikepdf-9.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:946b85f57b7e4ccfba088f14f2051e39b1d791013ade6c2ae63d02e89cd45205"}, - {file = "pikepdf-9.1.0-pp310-pypy310_pp73-macosx_12_0_x86_64.whl", hash = "sha256:9ff1a2c047b3d27e51ced33c1d5fd7b59a1bf34d55449c23a99f64478360ca3c"}, - {file = "pikepdf-9.1.0-pp310-pypy310_pp73-macosx_14_0_arm64.whl", hash = "sha256:aeb3885485a14b724615e9cf772d8351bacfbb455efa4bf96bc76ccd7c8e59ed"}, - {file = "pikepdf-9.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a97cc48e363266524a6e77610725f0307486595ffb651968a550cbde4916a5b"}, - {file = "pikepdf-9.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3b2fbb9b1ff709047885117819ac19b565c9701be7fdd7c87a84eab8805558c8"}, - {file = "pikepdf-9.1.0-pp39-pypy39_pp73-macosx_12_0_x86_64.whl", hash = "sha256:3a81c6540955729194e2be6499c4d303967012bfd5059fae58d6b81eba25c9e9"}, - {file = "pikepdf-9.1.0-pp39-pypy39_pp73-macosx_14_0_arm64.whl", hash = "sha256:6e045f6727ac2342a9b670c66ee7d5926383baba4476c7c7493104e600ba71a2"}, - {file = "pikepdf-9.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12d0c977f271edd18513d211c065b0c674c8a844846fb9fa6590698f823b9fa3"}, - {file = "pikepdf-9.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:58312470c85a25a80f0750dd4220f4d6c87323c9218c9c6d7a83f9a9119217a3"}, - {file = 
"pikepdf-9.1.0.tar.gz", hash = "sha256:e0c8d0a165b61d62495919b4351b5d2c7f0ebd47ea31d94d82c5203e97e07897"}, + {file = "pikepdf-9.1.2-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:d2bb9444c6a8cf0c0ead503a149e9bfa1a5a92643fa42bda088e8916692a87ee"}, + {file = "pikepdf-9.1.2-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:bc9ea1b3ffd842a86aa217380a2a2c1379e0265a29af73956796747f50bdfb09"}, + {file = "pikepdf-9.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:678bb47232a74a25e201e90bc601a3a9a24a20ba247369d9ebe6db0ef0cb0975"}, + {file = "pikepdf-9.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4aec189b5b4c5084a0cbd394900f5b19bace3653d1e656b09040be9c93b489d"}, + {file = "pikepdf-9.1.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7ae661386e4af757e1d58682551c9375fe6192613c5ffdf9025718f96ba6ddcc"}, + {file = "pikepdf-9.1.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5ad534b857287b70d089e2f977a8276ce6fec019f8b5d6b779a08ba5eb2d4e59"}, + {file = "pikepdf-9.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:a7d31e3038416527b4ac10775391f467ed0ab901f25c94ac30bc021506a31e41"}, + {file = "pikepdf-9.1.2-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:22aac73feae7e2f61a469991c3c1951f98b5b54fc4148a4e6e00b94e85aca7b6"}, + {file = "pikepdf-9.1.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:f7960375cc122160ec85be4689d36d8d0dc2bc28053aaeda9bc4ecdb8c4678d2"}, + {file = "pikepdf-9.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3e19161bb2fed87d173dfdbd4e0885a72916b572202dbb5c8aec31268606691"}, + {file = "pikepdf-9.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:077e78f8080935381ff5ead7879f9a5a3e2af048749eeea4b8d567effae36416"}, + {file = "pikepdf-9.1.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:67cd35bfd7c6211c1eb39376c0136ce1b4760d44cbb89ea2feb368aeec4d9f79"}, + {file = "pikepdf-9.1.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:88e2b766fce9e8f068bdf6274ac2981a38176274dd09bc84637d9dc55b8d846a"}, + {file = "pikepdf-9.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:e1e11cfc9c4c8c71f5454faa1804c6add847e7572cd6482cce690052b2e30095"}, + {file = "pikepdf-9.1.2-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:ee44497855f555d3464ef3b1977c83f31602bd363eb3cdf404fd87b9f7aea79e"}, + {file = "pikepdf-9.1.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:ea95927234a3cfbc3738b7c91f62143b222b01a68fa4d7e2ce74f67a206a8565"}, + {file = "pikepdf-9.1.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3327abfb30087643344662f0d59ca8b1f15f7d40e839a1798ecbf1ad3471bdb0"}, + {file = "pikepdf-9.1.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f3569164c33d6e9cb6651d2a01c34318513ce5bbfa069654c8050a572d7bde3"}, + {file = "pikepdf-9.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bd2a57e79b82f6ef151b51c9143ea439ec9bdfd22b22198ab6432aecba82bb1e"}, + {file = "pikepdf-9.1.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ff77e000aac5cc582aa81e364ccf70e292842fb62548898972b25f067c718bc6"}, + {file = "pikepdf-9.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:9676b29e189a6eb65ddf9ed0b5ac5a2369731fec88716bd465a343ac24e36d3b"}, + {file = "pikepdf-9.1.2-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:e66a9b48b738cd1d03f9c2039bc2a08eec251ef034dc9ef00cbf74b745444560"}, + {file = "pikepdf-9.1.2-cp313-cp313-macosx_14_0_arm64.whl", hash = 
"sha256:0e7d748baf9f37e7202c8d38cdd944c49e6fa361cc3bb345ca434f5a4caca8e7"}, + {file = "pikepdf-9.1.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14306354b460b6e868bb4a3eeb40ecac353cb02409e30249c1ed53b7ca3f25a0"}, + {file = "pikepdf-9.1.2-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:e033bdc2fb9bdc9847b4b8c02ddad9644c97028dbd4abdabbaa3b3af18f26465"}, + {file = "pikepdf-9.1.2-cp38-cp38-macosx_14_0_arm64.whl", hash = "sha256:900d96009329a6bcc22ffdd0f89f6f9bda861b4a6c910a13a9a1c5ac9250aed4"}, + {file = "pikepdf-9.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecb19ff6659516a012fc6bb3f502477afa200e9ef6d15d35c490b9413fcd665d"}, + {file = "pikepdf-9.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bced6b489f13685d22556d73702cd7f9b80a332c7299de113265c0a32b72abc3"}, + {file = "pikepdf-9.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:7b245a6303ac6fc470c856878d064250ed9fad9e64361bff65f52cb265c5ec61"}, + {file = "pikepdf-9.1.2-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:feca982b1c9bbfe630d0324c4344b18f76cc39c77c3eafd5ad47dcd4cd570802"}, + {file = "pikepdf-9.1.2-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:aab9c51b3528659d0066061b2fd5d91458a4ff6a16357e69bcefb8f90bc39109"}, + {file = "pikepdf-9.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c35c1995119c04b7e8f2e04aa0f28495846534795d8e7cac99a983b4e8bbfa5"}, + {file = "pikepdf-9.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b6b853bde828ed5c1e3fd8f283b2cdf7047a6fc6e56e571e06931c81d86a8d6"}, + {file = "pikepdf-9.1.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:10222e965afca1145e3b7351d2763da9d1a0b09ceea1d6498851a4ec3e49e042"}, + {file = "pikepdf-9.1.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cc42d5f5400fba10bacc3e57a0d639681a818ef0fd230a42d20f9c58dd88e104"}, + {file = "pikepdf-9.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:d475bfe6cf0616863c81411c0b8481c2ae6774a1a377eb52903d00a19a9bca2d"}, + {file = "pikepdf-9.1.2-pp310-pypy310_pp73-macosx_12_0_x86_64.whl", hash = "sha256:2494ec1f7cd517ec54c7c20615ccf7536a00d1b8b4027a6b809a016763a504e2"}, + {file = "pikepdf-9.1.2-pp310-pypy310_pp73-macosx_14_0_arm64.whl", hash = "sha256:ef0ab4a2d03688fd741675a869cfc6add955665421d747419e2e28f50b961446"}, + {file = "pikepdf-9.1.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdef63c8791b50158660440270888a83828776b18f6b44a7f53cc484b5070116"}, + {file = "pikepdf-9.1.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:67c288abc67e7dd61dd7b512361cc241e254f91a1e51c582a4b02dfbd5c2bcf2"}, + {file = "pikepdf-9.1.2-pp39-pypy39_pp73-macosx_12_0_x86_64.whl", hash = "sha256:ca36bce66fcf8b85199f8da009eb5e63f57d4360972ca17ba9021f5a7733db21"}, + {file = "pikepdf-9.1.2-pp39-pypy39_pp73-macosx_14_0_arm64.whl", hash = "sha256:5df82004c1a2bac74aa866d397910d9f9664c0394ddaa56cf2f52ca7abfc7f99"}, + {file = "pikepdf-9.1.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64e8d1037b484f415b6ed7debf809d9117f64c3a51fd879b941c5cd4e77e3b2a"}, + {file = "pikepdf-9.1.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2303f1c44bd91beec81e199655c0be6f819e7c9af49bbba7e2423bdf9404074d"}, + {file = "pikepdf-9.1.2.tar.gz", hash = "sha256:e728c178165bdc087a7fdf62e4457201a3d03581a7f6270354fb8c78122e105b"}, ] [package.dependencies] @@ -2371,7 +2403,7 @@ packaging = "*" Pillow = ">=10.0.1" [package.extras] -dev = ["pre-commit", "typer[all]"] +dev = 
["pre-commit", "typer"] docs = ["Sphinx (>=3)", "sphinx-autoapi", "sphinx-design", "sphinx-issues", "sphinx-rtd-theme", "tomli"] mypy = ["lxml-stubs", "types-Pillow", "types-requests", "types-setuptools"] test = ["attrs (>=20.2.0)", "coverage[toml]", "hypothesis (>=6.36)", "numpy (>=1.21.0)", "psutil (>=5.9)", "pybind11", "pytest (>=6.2.5)", "pytest-cov (>=3.0.0)", "pytest-timeout (>=2.1.0)", "pytest-xdist (>=2.5.0)", "python-dateutil (>=2.8.1)", "python-xmp-toolkit (>=2.0.1)", "tomli"] @@ -2475,57 +2507,63 @@ xmp = ["defusedxml"] [[package]] name = "pillow-heif" -version = "0.17.0" +version = "0.18.0" description = "Python interface for libheif library" optional = false python-versions = ">=3.8" files = [ - {file = "pillow_heif-0.17.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:70fa89a6ed62d4eb993f1adcd72a547861e7a947714bad87f0c0b201ef04c28c"}, - {file = "pillow_heif-0.17.0-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:7a201d9f086ab4c475691936e40e063529f401a0a53f70ef3b1e5404726b80fd"}, - {file = "pillow_heif-0.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f29d98668785fbb8ddcf0cf5e86f8676c2a4d7dc3c3ee51a37304c6047cea4d"}, - {file = "pillow_heif-0.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab279e6b5a5d65ee30db49d55668b9b1b2756827a251c2c6577d7dcaa152c1a9"}, - {file = "pillow_heif-0.17.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5d360eac8a066739fa01a9d14954a288bc52eb9ac90e924d85b0ab89502626d6"}, - {file = "pillow_heif-0.17.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:861cd3783df852c28d1c7434562f59dc1f10f38746c5bbf0be4fafc8516a493b"}, - {file = "pillow_heif-0.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:3d2f4b33eb0ea85d5afae3d340aa60781570ca56513666b22fe1fa94a6546ab8"}, - {file = "pillow_heif-0.17.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:7991509a4d0291116a26b9cd9f3482e4b2a4e3cdd9950de33cd78dae53b7a94d"}, - {file = "pillow_heif-0.17.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:b85cdcdcec561e28b3f72d181342a6fe72298a4da7ad6fe973161efb699f9555"}, - {file = "pillow_heif-0.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23eeda0ce7d22d35f806c48c9f4e9eccfb7f5cf7f54f459ca71d053ed6e7e84e"}, - {file = "pillow_heif-0.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ac7505dbdea5467ba5eb4013c06915f9b42f893b4856ac957f4568317814268"}, - {file = "pillow_heif-0.17.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ccdbdf6cc7e63b6dc886c1e258f84f9ca622611c0f63ecfd7e453e026aa18a35"}, - {file = "pillow_heif-0.17.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2055dd6a9f2ccfe68bcfb375aac667923ca330adea4c21d46e8b0541c942768f"}, - {file = "pillow_heif-0.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:853637f56b431d77750390fb3327f4e7025a755c1bcee9d209384f4b24583598"}, - {file = "pillow_heif-0.17.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:29e5bc3a6529e1247f974386f55e4a2932dbdb8b8407496c95e7fd5c30a695a6"}, - {file = "pillow_heif-0.17.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:28226858905df14879a856d11179cf765c2728306ed686802bb7c8fff5e77a82"}, - {file = "pillow_heif-0.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4992e51c5e6968957e083fdd2b93865396b00755a4c8e53c71c0233cc53637f"}, - {file = "pillow_heif-0.17.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e460433a23aab5c0e92fb6700ee3c2aec32e680daa12520bd784f263bf95319f"}, 
- {file = "pillow_heif-0.17.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4e193d3d078c301fd3ecd136ca4e351df6ae7a3b5837f48828bb9e78390a3a88"}, - {file = "pillow_heif-0.17.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:72d3d32756fb039a062f239c5890a2757d0ca8b36c6a4dd1b6c81bbecc36d646"}, - {file = "pillow_heif-0.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:aef99e3201be61e1b2322faf982765327f7423b8a065c31c24761aecf5df161e"}, - {file = "pillow_heif-0.17.0-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:7d82702167839a14f95169e5d4ecae7c31e2669a6c0772b6cb7eae4d74caf69b"}, - {file = "pillow_heif-0.17.0-cp38-cp38-macosx_14_0_arm64.whl", hash = "sha256:5b8983e5dc8156e1af5a26335b086d85ed3f8e570be5f922546ef131e8cf9c00"}, - {file = "pillow_heif-0.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7e0dcc4e3901c8d2bd099e670c62f431895d2d7a5a0979da89865508d41e1a06"}, - {file = "pillow_heif-0.17.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:444408526de16e55509077cce83d1893ba14e47e9f1b61e1a83aa9a23dcb8eeb"}, - {file = "pillow_heif-0.17.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d0d92e835b57ddff51a06442d5c701eae4a8da32d4dcd14bc0fa74dfccc35ac7"}, - {file = "pillow_heif-0.17.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:1c9a69b8f1ff131ae3d11ba86386b7ac1872702d8529a055cb694cffbd876a7b"}, - {file = "pillow_heif-0.17.0-cp38-cp38-win_amd64.whl", hash = "sha256:27074ae9ded6576bc99d75d5c9911736711d44bb2a383177e305db6dbea1da27"}, - {file = "pillow_heif-0.17.0-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:e1aae72d0a608525aa041ed3853198f9e5d16bc1ccb0e3c5320ec08be5ed8c7f"}, - {file = "pillow_heif-0.17.0-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:80f2156b7d75abdc3b69c4688192be56cb40db9a423d67c375f34099814aed1a"}, - {file = "pillow_heif-0.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:190a69a5695fe6e661024b7266c3282aad4c582ca172ce4cb3851986e0635d7f"}, - {file = "pillow_heif-0.17.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:130b8ba2c4116dbaaa531052fe3868ab98e078ab8d51efb9453ca9c787dc3517"}, - {file = "pillow_heif-0.17.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:11545360fd2d52b8319988c73f58caba6de6c3a49b28f0808a0cf48a29f860c2"}, - {file = "pillow_heif-0.17.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ec51216fa9373a6efe493614766e4fc7ad715d5496c049a7019c7890a0eeb9bc"}, - {file = "pillow_heif-0.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:dea62e7559577b69354672f3f23c3c8d308c14015965ab97f9d4440229ae6d70"}, - {file = "pillow_heif-0.17.0-pp310-pypy310_pp73-macosx_12_0_x86_64.whl", hash = "sha256:d787ee558500d0222b8ab30a520512109c39fe76c25cea613295c83a808d3589"}, - {file = "pillow_heif-0.17.0-pp310-pypy310_pp73-macosx_14_0_arm64.whl", hash = "sha256:4cbe8e609c363d6c41f5fd52aa4c1274927b88df1ae8b96ff3dee1d5549b58e2"}, - {file = "pillow_heif-0.17.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd469939924a9722e4353d3770d27d9fa8a5a2a869519f48f190a8bc6694d995"}, - {file = "pillow_heif-0.17.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58fdcb04b6c3a8802f94415bd6e2d20df2d79cf6947e4ff9a1b1cdfdb13a187a"}, - {file = "pillow_heif-0.17.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:fd5b9d27ffb06ce81a8d4c86ca026ce7644108cc27802a8122a52b7863fb1af7"}, - {file = "pillow_heif-0.17.0-pp39-pypy39_pp73-macosx_12_0_x86_64.whl", hash = 
"sha256:eed517d85d2ce15b8fdaf382bcd04947e87d961e0dceb258c161167ed4a6cbb0"}, - {file = "pillow_heif-0.17.0-pp39-pypy39_pp73-macosx_14_0_arm64.whl", hash = "sha256:f70150db79a3c2e9d1f93263dc88810f265c3d86968b8954bc563f487672a999"}, - {file = "pillow_heif-0.17.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cda9f1d67bbb8e2b4d976e6113d876b27bea9544a95e37ee7cb8e53f242598d"}, - {file = "pillow_heif-0.17.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c27fe963fc279128519e5845556e8c57622b3d504df1b63aa06038086de0799e"}, - {file = "pillow_heif-0.17.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a10cdb3d525d22fc31b268f55740352ac2adf357016a87b16116b0d7df29198f"}, - {file = "pillow_heif-0.17.0.tar.gz", hash = "sha256:9541ffd1f80ff3f12da26aba76eb6da458fafa5f8bf4ee16279de0e2a5464ba9"}, + {file = "pillow_heif-0.18.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:e1ad1d97f42fc39de8639b3f45d4d053e00158fc64f1674a14d8912cf81791e3"}, + {file = "pillow_heif-0.18.0-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:c45b8d19d8bb1fc61f1f648d042da16d9085506055665a64b56ce8d8ed83c42b"}, + {file = "pillow_heif-0.18.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d524458837bdc5410f66de8e68e864bd179d19a1c205daf7f8c9a07194cc5615"}, + {file = "pillow_heif-0.18.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f032593b3cfc96970efc91860ef6eaa62b1e661418d7f9ec186dff9ac7c9844"}, + {file = "pillow_heif-0.18.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:06649ea6bfac8ca5e7ac898c78c4aad2fd0bc1ce278fa86c503170010902193b"}, + {file = "pillow_heif-0.18.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8612f4c2e8a3388647c3ce0b7810398cf941aceecd4b2c7790331a53117baf10"}, + {file = "pillow_heif-0.18.0-cp310-cp310-win_amd64.whl", hash = "sha256:a7cc374452f5b00cf44171a7bfc08c016b0c0a9f226a99369ffbeb13fd45fa7b"}, + {file = "pillow_heif-0.18.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:a5d8bfcf8b23b67b8937bcc25fd464f1ca383d3d1d65220463be81ccf6c8185b"}, + {file = "pillow_heif-0.18.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:2a4c41e76f2da4e046f170cb3716b7aedc466a194509bc0bf1a7c735d5278b8c"}, + {file = "pillow_heif-0.18.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68781589ee03bf4bd670e55444c25cb0784451b0beacfb0f79d7f56ae497a767"}, + {file = "pillow_heif-0.18.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8782f9f82c534f4a37ce3c609505f992f340a495da1247951108218a201d0e9"}, + {file = "pillow_heif-0.18.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2902460d60843e2c379320f1b752a6d4879e3ab0479674ee323d7dee487cccc8"}, + {file = "pillow_heif-0.18.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c8bebd570446a7b4f7db5ca8eb333dd4591fda13524bc49eee34b3f5cf40741b"}, + {file = "pillow_heif-0.18.0-cp311-cp311-win_amd64.whl", hash = "sha256:4476bbd7bb7cc1d94c35f0c85786dbe528661bc937422db03fdc865b9ee91d30"}, + {file = "pillow_heif-0.18.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:c795e7ccceea33e01e49ce536139f94cabb1bf017393666f76c05a9daebae2da"}, + {file = "pillow_heif-0.18.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:4dd5b3ec09be45c1ef63be31773df90e18ee08e5e950018b0a349924b54a24ac"}, + {file = "pillow_heif-0.18.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb2eade59c2654c2643a3b637de37c19e75a77c66a3e9a5e0ae26210e4f48aee"}, + {file = 
"pillow_heif-0.18.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35b59d599bfdb8454739db6b92f0841ecadbe887babb5ed5abd5299587843eef"}, + {file = "pillow_heif-0.18.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:26a0b8b8e899e7bcc876ee61fcadb0f0b849bd6a0d5c20f0e969c77a43b40568"}, + {file = "pillow_heif-0.18.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0276a3e0c667677ed0c67f4512cdf2f674065018049307ba4de5cb4648b0a33e"}, + {file = "pillow_heif-0.18.0-cp312-cp312-win_amd64.whl", hash = "sha256:5916fa31f2015626dd2372d14e24521ea6caed11b25be14faa9b9c67731087ce"}, + {file = "pillow_heif-0.18.0-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:ca554f086bc146f1a798adcd77fdecd81564cc0cd74802ee61e3869ab87282f7"}, + {file = "pillow_heif-0.18.0-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:2821d30d22bbb94c2a0fae25eb566421bf22c909958e031d3f0973b482b88515"}, + {file = "pillow_heif-0.18.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8376309e178f39a2891183cb9662f1c2c87b8614ff13871f077f89edf65ecf48"}, + {file = "pillow_heif-0.18.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3549e26a65e04e7e986888993b03aae0f848576c2404b5edf12d7db76ef2e72b"}, + {file = "pillow_heif-0.18.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0138a08ed90e54c230878c0b8cb92447ad591b7b2e86bfca145029322ba384c7"}, + {file = "pillow_heif-0.18.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f552419c8bd754603f1dfbc7f8cdd666118fdd3d063d67974c5bd5a8d7fed9de"}, + {file = "pillow_heif-0.18.0-cp313-cp313-win_amd64.whl", hash = "sha256:be148b8463ac5d25fdf94d70c69a53712890cd3974ead906c98e7bf35fc96ba6"}, + {file = "pillow_heif-0.18.0-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:bca173920f16ea8d1c40a970f002be15ac34a5fa99d39403a85472e265db2357"}, + {file = "pillow_heif-0.18.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e8e9860259688700f13baed015632c4ddaf813d26cc856e37ebf0a3f171661"}, + {file = "pillow_heif-0.18.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21b9080d96d51158774e3022fc9af19b650863cbb23fac991458cb354b1aa63d"}, + {file = "pillow_heif-0.18.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:5b2ee478e373c0502dc431b22840dd0c551c4ce0e1007ab13f038a868ed375c1"}, + {file = "pillow_heif-0.18.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:35d2d7199dc34f28aef39cec984c043b1ee30d5c46048566584dc61cf4108c09"}, + {file = "pillow_heif-0.18.0-cp38-cp38-win_amd64.whl", hash = "sha256:d399ac5fb499c8feb9770503db25073dfeaccd01238bcb6aaf01354cd83db123"}, + {file = "pillow_heif-0.18.0-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:3af89fa7a73143bc49fc18f92b1a6c0fa68ecb56fb56224fb369c2f56729fbb6"}, + {file = "pillow_heif-0.18.0-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:8b0b9a66e604aef2d0a19a7cb2247c5f9b3352827bb1b00816053ce4982ec8ab"}, + {file = "pillow_heif-0.18.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02af8950d190e3bea28ed2d0ca40798eeae88eaf6e099ee44ec654667f979d97"}, + {file = "pillow_heif-0.18.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:165430447de0f7da259eb07d9487571784912a64c75cd0c52d0d506c114ec7ce"}, + {file = "pillow_heif-0.18.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7666cbab98246cf9355625e66bf1bb885fdcf8ff4a917f4db04231e80ea692ee"}, + {file = "pillow_heif-0.18.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:f403aadee232509ee2fdedbfda2dd0bae75098e70a8ddcc010061f92ab962517"}, + {file = "pillow_heif-0.18.0-cp39-cp39-win_amd64.whl", hash = "sha256:7ed6cc5ea21f04b15b7604e20592e0ee760ee10fb2da2209b85c94bf0b6f1034"}, + {file = "pillow_heif-0.18.0-pp310-pypy310_pp73-macosx_12_0_x86_64.whl", hash = "sha256:744b8a00a817e7139a7e2fd296092689116700dfd63e34941abdc8ae85b3a982"}, + {file = "pillow_heif-0.18.0-pp310-pypy310_pp73-macosx_14_0_arm64.whl", hash = "sha256:89b1d63be7e8036ab45f0cd58e27e54113cfd7e852e91606b5cec4fa788a503f"}, + {file = "pillow_heif-0.18.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9a8ab9803d79e84e7751cc0930d381112efbf71461ca123a5c2b7abf1050c72"}, + {file = "pillow_heif-0.18.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7649431ea30a9c342888d814f07d2aed9cab06ef5fe5bf065d514eceb2c8d24e"}, + {file = "pillow_heif-0.18.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:8ee07e334f99bdf399d207bb19653496c65fcbf006f2cee964047f6d6d57acd9"}, + {file = "pillow_heif-0.18.0-pp39-pypy39_pp73-macosx_12_0_x86_64.whl", hash = "sha256:4148a7f17d184c815f428949d6e60582136ef0802a7462c842ee9fe15ca9be16"}, + {file = "pillow_heif-0.18.0-pp39-pypy39_pp73-macosx_14_0_arm64.whl", hash = "sha256:5908e8079f62ec8ace9e7c554691a82ece088d0945d980a877e981f208e85193"}, + {file = "pillow_heif-0.18.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28358d5e4e3aeb4af2a60a20187099ba03ab8619bcec8212900657371778da96"}, + {file = "pillow_heif-0.18.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8988a9ce18425aff189913905ce28e61220061c3f222e08213eb473b88a41a20"}, + {file = "pillow_heif-0.18.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7ef23775de70124a02ad9be69af58126ec63a2e2987495355e75cabb265c01cb"}, + {file = "pillow_heif-0.18.0.tar.gz", hash = "sha256:70318dad9faa76121c6592ac0ab59881ff0dac6ab791a922e70d82c7706cce88"}, ] [package.dependencies] @@ -2590,22 +2628,22 @@ testing = ["google-api-core (>=1.31.5)"] [[package]] name = "protobuf" -version = "5.27.2" +version = "5.27.3" description = "" optional = false python-versions = ">=3.8" files = [ - {file = "protobuf-5.27.2-cp310-abi3-win32.whl", hash = "sha256:354d84fac2b0d76062e9b3221f4abbbacdfd2a4d8af36bab0474f3a0bb30ab38"}, - {file = "protobuf-5.27.2-cp310-abi3-win_amd64.whl", hash = "sha256:0e341109c609749d501986b835f667c6e1e24531096cff9d34ae411595e26505"}, - {file = "protobuf-5.27.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a109916aaac42bff84702fb5187f3edadbc7c97fc2c99c5ff81dd15dcce0d1e5"}, - {file = "protobuf-5.27.2-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:176c12b1f1c880bf7a76d9f7c75822b6a2bc3db2d28baa4d300e8ce4cde7409b"}, - {file = "protobuf-5.27.2-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:b848dbe1d57ed7c191dfc4ea64b8b004a3f9ece4bf4d0d80a367b76df20bf36e"}, - {file = "protobuf-5.27.2-cp38-cp38-win32.whl", hash = "sha256:4fadd8d83e1992eed0248bc50a4a6361dc31bcccc84388c54c86e530b7f58863"}, - {file = "protobuf-5.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:610e700f02469c4a997e58e328cac6f305f649826853813177e6290416e846c6"}, - {file = "protobuf-5.27.2-cp39-cp39-win32.whl", hash = "sha256:9e8f199bf7f97bd7ecebffcae45ebf9527603549b2b562df0fbc6d4d688f14ca"}, - {file = "protobuf-5.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:7fc3add9e6003e026da5fc9e59b131b8f22b428b991ccd53e2af8071687b4fce"}, - {file = "protobuf-5.27.2-py3-none-any.whl", hash = 
"sha256:54330f07e4949d09614707c48b06d1a22f8ffb5763c159efd5c0928326a91470"}, - {file = "protobuf-5.27.2.tar.gz", hash = "sha256:f3ecdef226b9af856075f28227ff2c90ce3a594d092c39bee5513573f25e2714"}, + {file = "protobuf-5.27.3-cp310-abi3-win32.whl", hash = "sha256:dcb307cd4ef8fec0cf52cb9105a03d06fbb5275ce6d84a6ae33bc6cf84e0a07b"}, + {file = "protobuf-5.27.3-cp310-abi3-win_amd64.whl", hash = "sha256:16ddf3f8c6c41e1e803da7abea17b1793a97ef079a912e42351eabb19b2cffe7"}, + {file = "protobuf-5.27.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:68248c60d53f6168f565a8c76dc58ba4fa2ade31c2d1ebdae6d80f969cdc2d4f"}, + {file = "protobuf-5.27.3-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:b8a994fb3d1c11156e7d1e427186662b64694a62b55936b2b9348f0a7c6625ce"}, + {file = "protobuf-5.27.3-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:a55c48f2a2092d8e213bd143474df33a6ae751b781dd1d1f4d953c128a415b25"}, + {file = "protobuf-5.27.3-cp38-cp38-win32.whl", hash = "sha256:043853dcb55cc262bf2e116215ad43fa0859caab79bb0b2d31b708f128ece035"}, + {file = "protobuf-5.27.3-cp38-cp38-win_amd64.whl", hash = "sha256:c2a105c24f08b1e53d6c7ffe69cb09d0031512f0b72f812dd4005b8112dbe91e"}, + {file = "protobuf-5.27.3-cp39-cp39-win32.whl", hash = "sha256:c84eee2c71ed83704f1afbf1a85c3171eab0fd1ade3b399b3fad0884cbcca8bf"}, + {file = "protobuf-5.27.3-cp39-cp39-win_amd64.whl", hash = "sha256:af7c0b7cfbbb649ad26132e53faa348580f844d9ca46fd3ec7ca48a1ea5db8a1"}, + {file = "protobuf-5.27.3-py3-none-any.whl", hash = "sha256:8572c6533e544ebf6899c360e91d6bcbbee2549251643d32c52cf8a5de295ba5"}, + {file = "protobuf-5.27.3.tar.gz", hash = "sha256:82460903e640f2b7e34ee81a947fdaad89de796d324bcbc38ff5430bcdead82c"}, ] [[package]] @@ -3108,163 +3146,181 @@ files = [ [[package]] name = "pyyaml" -version = "6.0.1" +version = "6.0.2" description = "YAML parser and emitter for Python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = 
"PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, 
+ {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] [[package]] name = "rapidfuzz" -version = "3.9.4" +version = "3.9.6" description = "rapid fuzzy string matching" optional = false python-versions = ">=3.8" files = [ - {file = "rapidfuzz-3.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c9b9793c19bdf38656c8eaefbcf4549d798572dadd70581379e666035c9df781"}, - {file = "rapidfuzz-3.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:015b5080b999404fe06ec2cb4f40b0be62f0710c926ab41e82dfbc28e80675b4"}, - {file = "rapidfuzz-3.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:acc5ceca9c1e1663f3e6c23fb89a311f69b7615a40ddd7645e3435bf3082688a"}, - {file = "rapidfuzz-3.9.4-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1424e238bc3f20e1759db1e0afb48a988a9ece183724bef91ea2a291c0b92a95"}, - {file = "rapidfuzz-3.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ed01378f605aa1f449bee82cd9c83772883120d6483e90aa6c5a4ce95dc5c3aa"}, - {file = "rapidfuzz-3.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eb26d412271e5a76cdee1c2d6bf9881310665d3fe43b882d0ed24edfcb891a84"}, - {file = "rapidfuzz-3.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f37e9e1f17be193c41a31c864ad4cd3ebd2b40780db11cd5c04abf2bcf4201b"}, - {file = "rapidfuzz-3.9.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d070ec5cf96b927c4dc5133c598c7ff6db3b833b363b2919b13417f1002560bc"}, - {file = "rapidfuzz-3.9.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:10e61bb7bc807968cef09a0e32ce253711a2d450a4dce7841d21d45330ffdb24"}, - {file = "rapidfuzz-3.9.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:31a2fc60bb2c7face4140010a7aeeafed18b4f9cdfa495cc644a68a8c60d1ff7"}, - {file = "rapidfuzz-3.9.4-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:fbebf1791a71a2e89f5c12b78abddc018354d5859e305ec3372fdae14f80a826"}, - {file = "rapidfuzz-3.9.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:aee9fc9e3bb488d040afc590c0a7904597bf4ccd50d1491c3f4a5e7e67e6cd2c"}, - {file = "rapidfuzz-3.9.4-cp310-cp310-win32.whl", hash = "sha256:005a02688a51c7d2451a2d41c79d737aa326ff54167211b78a383fc2aace2c2c"}, - {file = "rapidfuzz-3.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:3a2e75e41ee3274754d3b2163cc6c82cd95b892a85ab031f57112e09da36455f"}, - {file = "rapidfuzz-3.9.4-cp310-cp310-win_arm64.whl", hash = "sha256:2c99d355f37f2b289e978e761f2f8efeedc2b14f4751d9ff7ee344a9a5ca98d9"}, - {file = "rapidfuzz-3.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:07141aa6099e39d48637ce72a25b893fc1e433c50b3e837c75d8edf99e0c63e1"}, - {file = "rapidfuzz-3.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:db1664eaff5d7d0f2542dd9c25d272478deaf2c8412e4ad93770e2e2d828e175"}, - {file = "rapidfuzz-3.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc01a223f6605737bec3202e94dcb1a449b6c76d46082cfc4aa980f2a60fd40e"}, - {file = "rapidfuzz-3.9.4-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1869c42e73e2a8910b479be204fa736418741b63ea2325f9cc583c30f2ded41a"}, - {file = "rapidfuzz-3.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:62ea7007941fb2795fff305ac858f3521ec694c829d5126e8f52a3e92ae75526"}, - {file = "rapidfuzz-3.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:698e992436bf7f0afc750690c301215a36ff952a6dcd62882ec13b9a1ebf7a39"}, - {file = "rapidfuzz-3.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b76f611935f15a209d3730c360c56b6df8911a9e81e6a38022efbfb96e433bab"}, - {file = "rapidfuzz-3.9.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:129627d730db2e11f76169344a032f4e3883d34f20829419916df31d6d1338b1"}, - {file = "rapidfuzz-3.9.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:90a82143c14e9a14b723a118c9ef8d1bbc0c5a16b1ac622a1e6c916caff44dd8"}, - {file = 
"rapidfuzz-3.9.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ded58612fe3b0e0d06e935eaeaf5a9fd27da8ba9ed3e2596307f40351923bf72"}, - {file = "rapidfuzz-3.9.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:f16f5d1c4f02fab18366f2d703391fcdbd87c944ea10736ca1dc3d70d8bd2d8b"}, - {file = "rapidfuzz-3.9.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:26aa7eece23e0df55fb75fbc2a8fb678322e07c77d1fd0e9540496e6e2b5f03e"}, - {file = "rapidfuzz-3.9.4-cp311-cp311-win32.whl", hash = "sha256:f187a9c3b940ce1ee324710626daf72c05599946bd6748abe9e289f1daa9a077"}, - {file = "rapidfuzz-3.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d8e9130fe5d7c9182990b366ad78fd632f744097e753e08ace573877d67c32f8"}, - {file = "rapidfuzz-3.9.4-cp311-cp311-win_arm64.whl", hash = "sha256:40419e98b10cd6a00ce26e4837a67362f658fc3cd7a71bd8bd25c99f7ee8fea5"}, - {file = "rapidfuzz-3.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b5d5072b548db1b313a07d62d88fe0b037bd2783c16607c647e01b070f6cf9e5"}, - {file = "rapidfuzz-3.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cf5bcf22e1f0fd273354462631d443ef78d677f7d2fc292de2aec72ae1473e66"}, - {file = "rapidfuzz-3.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0c8fc973adde8ed52810f590410e03fb6f0b541bbaeb04c38d77e63442b2df4c"}, - {file = "rapidfuzz-3.9.4-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2464bb120f135293e9a712e342c43695d3d83168907df05f8c4ead1612310c7"}, - {file = "rapidfuzz-3.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8d9d58689aca22057cf1a5851677b8a3ccc9b535ca008c7ed06dc6e1899f7844"}, - {file = "rapidfuzz-3.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:167e745f98baa0f3034c13583e6302fb69249a01239f1483d68c27abb841e0a1"}, - {file = "rapidfuzz-3.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db0bf0663b4b6da1507869722420ea9356b6195aa907228d6201303e69837af9"}, - {file = "rapidfuzz-3.9.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:cd6ac61b74fdb9e23f04d5f068e6cf554f47e77228ca28aa2347a6ca8903972f"}, - {file = "rapidfuzz-3.9.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:60ff67c690acecf381759c16cb06c878328fe2361ddf77b25d0e434ea48a29da"}, - {file = "rapidfuzz-3.9.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:cb934363380c60f3a57d14af94325125cd8cded9822611a9f78220444034e36e"}, - {file = "rapidfuzz-3.9.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:fe833493fb5cc5682c823ea3e2f7066b07612ee8f61ecdf03e1268f262106cdd"}, - {file = "rapidfuzz-3.9.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2797fb847d89e04040d281cb1902cbeffbc4b5131a5c53fc0db490fd76b2a547"}, - {file = "rapidfuzz-3.9.4-cp312-cp312-win32.whl", hash = "sha256:52e3d89377744dae68ed7c84ad0ddd3f5e891c82d48d26423b9e066fc835cc7c"}, - {file = "rapidfuzz-3.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:c76da20481c906e08400ee9be230f9e611d5931a33707d9df40337c2655c84b5"}, - {file = "rapidfuzz-3.9.4-cp312-cp312-win_arm64.whl", hash = "sha256:f2d2846f3980445864c7e8b8818a29707fcaff2f0261159ef6b7bd27ba139296"}, - {file = "rapidfuzz-3.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:355fc4a268ffa07bab88d9adee173783ec8d20136059e028d2a9135c623c44e6"}, - {file = "rapidfuzz-3.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4d81a78f90269190b568a8353d4ea86015289c36d7e525cd4d43176c88eff429"}, - {file = "rapidfuzz-3.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:9e618625ffc4660b26dc8e56225f8b966d5842fa190e70c60db6cd393e25b86e"}, - {file = "rapidfuzz-3.9.4-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b712336ad6f2bacdbc9f1452556e8942269ef71f60a9e6883ef1726b52d9228a"}, - {file = "rapidfuzz-3.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc1ee19fdad05770c897e793836c002344524301501d71ef2e832847425707"}, - {file = "rapidfuzz-3.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1950f8597890c0c707cb7e0416c62a1cf03dcdb0384bc0b2dbda7e05efe738ec"}, - {file = "rapidfuzz-3.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a6c35f272ec9c430568dc8c1c30cb873f6bc96be2c79795e0bce6db4e0e101d"}, - {file = "rapidfuzz-3.9.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:1df0f9e9239132a231c86ae4f545ec2b55409fa44470692fcfb36b1bd00157ad"}, - {file = "rapidfuzz-3.9.4-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:d2c51955329bfccf99ae26f63d5928bf5be9fcfcd9f458f6847fd4b7e2b8986c"}, - {file = "rapidfuzz-3.9.4-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:3c522f462d9fc504f2ea8d82e44aa580e60566acc754422c829ad75c752fbf8d"}, - {file = "rapidfuzz-3.9.4-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:d8a52fc50ded60d81117d7647f262c529659fb21d23e14ebfd0b35efa4f1b83d"}, - {file = "rapidfuzz-3.9.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:04dbdfb0f0bfd3f99cf1e9e24fadc6ded2736d7933f32f1151b0f2abb38f9a25"}, - {file = "rapidfuzz-3.9.4-cp38-cp38-win32.whl", hash = "sha256:4968c8bd1df84b42f382549e6226710ad3476f976389839168db3e68fd373298"}, - {file = "rapidfuzz-3.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:3fe4545f89f8d6c27b6bbbabfe40839624873c08bd6700f63ac36970a179f8f5"}, - {file = "rapidfuzz-3.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9f256c8fb8f3125574c8c0c919ab0a1f75d7cba4d053dda2e762dcc36357969d"}, - {file = "rapidfuzz-3.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f5fdc09cf6e9d8eac3ce48a4615b3a3ee332ea84ac9657dbbefef913b13e632f"}, - {file = "rapidfuzz-3.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d395d46b80063d3b5d13c0af43d2c2cedf3ab48c6a0c2aeec715aa5455b0c632"}, - {file = "rapidfuzz-3.9.4-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7fa714fb96ce9e70c37e64c83b62fe8307030081a0bfae74a76fac7ba0f91715"}, - {file = "rapidfuzz-3.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1bc1a0f29f9119be7a8d3c720f1d2068317ae532e39e4f7f948607c3a6de8396"}, - {file = "rapidfuzz-3.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6022674aa1747d6300f699cd7c54d7dae89bfe1f84556de699c4ac5df0838082"}, - {file = "rapidfuzz-3.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcb72e5f9762fd469701a7e12e94b924af9004954f8c739f925cb19c00862e38"}, - {file = "rapidfuzz-3.9.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ad04ae301129f0eb5b350a333accd375ce155a0c1cec85ab0ec01f770214e2e4"}, - {file = "rapidfuzz-3.9.4-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f46a22506f17c0433e349f2d1dc11907c393d9b3601b91d4e334fa9a439a6a4d"}, - {file = "rapidfuzz-3.9.4-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:01b42a8728c36011718da409aa86b84984396bf0ca3bfb6e62624f2014f6022c"}, - {file = "rapidfuzz-3.9.4-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:e590d5d5443cf56f83a51d3c4867bd1f6be8ef8cfcc44279522bcef3845b2a51"}, - {file = "rapidfuzz-3.9.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:4c72078b5fdce34ba5753f9299ae304e282420e6455e043ad08e4488ca13a2b0"}, - {file = "rapidfuzz-3.9.4-cp39-cp39-win32.whl", hash = "sha256:f75639277304e9b75e6a7b3c07042d2264e16740a11e449645689ed28e9c2124"}, - {file = "rapidfuzz-3.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:e81e27e8c32a1e1278a4bb1ce31401bfaa8c2cc697a053b985a6f8d013df83ec"}, - {file = "rapidfuzz-3.9.4-cp39-cp39-win_arm64.whl", hash = "sha256:15bc397ee9a3ed1210b629b9f5f1da809244adc51ce620c504138c6e7095b7bd"}, - {file = "rapidfuzz-3.9.4-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:20488ade4e1ddba3cfad04f400da7a9c1b91eff5b7bd3d1c50b385d78b587f4f"}, - {file = "rapidfuzz-3.9.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:e61b03509b1a6eb31bc5582694f6df837d340535da7eba7bedb8ae42a2fcd0b9"}, - {file = "rapidfuzz-3.9.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:098d231d4e51644d421a641f4a5f2f151f856f53c252b03516e01389b2bfef99"}, - {file = "rapidfuzz-3.9.4-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:17ab8b7d10fde8dd763ad428aa961c0f30a1b44426e675186af8903b5d134fb0"}, - {file = "rapidfuzz-3.9.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e272df61bee0a056a3daf99f9b1bd82cf73ace7d668894788139c868fdf37d6f"}, - {file = "rapidfuzz-3.9.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d6481e099ff8c4edda85b8b9b5174c200540fd23c8f38120016c765a86fa01f5"}, - {file = "rapidfuzz-3.9.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ad61676e9bdae677d577fe80ec1c2cea1d150c86be647e652551dcfe505b1113"}, - {file = "rapidfuzz-3.9.4-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:af65020c0dd48d0d8ae405e7e69b9d8ae306eb9b6249ca8bf511a13f465fad85"}, - {file = "rapidfuzz-3.9.4-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d38b4e026fcd580e0bda6c0ae941e0e9a52c6bc66cdce0b8b0da61e1959f5f8"}, - {file = "rapidfuzz-3.9.4-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f74ed072c2b9dc6743fb19994319d443a4330b0e64aeba0aa9105406c7c5b9c2"}, - {file = "rapidfuzz-3.9.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aee5f6b8321f90615c184bd8a4c676e9becda69b8e4e451a90923db719d6857c"}, - {file = "rapidfuzz-3.9.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3a555e3c841d6efa350f862204bb0a3fea0c006b8acc9b152b374fa36518a1c6"}, - {file = "rapidfuzz-3.9.4-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0772150d37bf018110351c01d032bf9ab25127b966a29830faa8ad69b7e2f651"}, - {file = "rapidfuzz-3.9.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:addcdd3c3deef1bd54075bd7aba0a6ea9f1d01764a08620074b7a7b1e5447cb9"}, - {file = "rapidfuzz-3.9.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3fe86b82b776554add8f900b6af202b74eb5efe8f25acdb8680a5c977608727f"}, - {file = "rapidfuzz-3.9.4-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0fc91ac59f4414d8542454dfd6287a154b8e6f1256718c898f695bdbb993467"}, - {file = "rapidfuzz-3.9.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a944e546a296a5fdcaabb537b01459f1b14d66f74e584cb2a91448bffadc3c1"}, - {file = "rapidfuzz-3.9.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4fb96ba96d58c668a17a06b5b5e8340fedc26188e87b0d229d38104556f30cd8"}, - {file = "rapidfuzz-3.9.4.tar.gz", hash = "sha256:366bf8947b84e37f2f4cf31aaf5f37c39f620d8c0eddb8b633e6ba0129ca4a0a"}, + {file = 
"rapidfuzz-3.9.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a7ed0d0b9c85720f0ae33ac5efc8dc3f60c1489dad5c29d735fbdf2f66f0431f"}, + {file = "rapidfuzz-3.9.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f3deff6ab7017ed21b9aec5874a07ad13e6b2a688af055837f88b743c7bfd947"}, + {file = "rapidfuzz-3.9.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3f9fc060160507b2704f7d1491bd58453d69689b580cbc85289335b14fe8ca"}, + {file = "rapidfuzz-3.9.6-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c4e86c2b3827fa6169ad6e7d4b790ce02a20acefb8b78d92fa4249589bbc7a2c"}, + {file = "rapidfuzz-3.9.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f982e1aafb4bd8207a5e073b1efef9e68a984e91330e1bbf364f9ed157ed83f0"}, + {file = "rapidfuzz-3.9.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9196a51d0ec5eaaaf5bca54a85b7b1e666fc944c332f68e6427503af9fb8c49e"}, + {file = "rapidfuzz-3.9.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb5a514064e02585b1cc09da2fe406a6dc1a7e5f3e92dd4f27c53e5f1465ec81"}, + {file = "rapidfuzz-3.9.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e3a4244f65dbc3580b1275480118c3763f9dc29fc3dd96610560cb5e140a4d4a"}, + {file = "rapidfuzz-3.9.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:f6ebb910a702e41641e1e1dada3843bc11ba9107a33c98daef6945a885a40a07"}, + {file = "rapidfuzz-3.9.6-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:624fbe96115fb39addafa288d583b5493bc76dab1d34d0ebba9987d6871afdf9"}, + {file = "rapidfuzz-3.9.6-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:1c59f1c1507b7a557cf3c410c76e91f097460da7d97e51c985343798e9df7a3c"}, + {file = "rapidfuzz-3.9.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f6f0256cb27b6a0fb2e1918477d1b56473cd04acfa245376a342e7c15806a396"}, + {file = "rapidfuzz-3.9.6-cp310-cp310-win32.whl", hash = "sha256:24d473d00d23a30a85802b502b417a7f5126019c3beec91a6739fe7b95388b24"}, + {file = "rapidfuzz-3.9.6-cp310-cp310-win_amd64.whl", hash = "sha256:248f6d2612e661e2b5f9a22bbd5862a1600e720da7bb6ad8a55bb1548cdfa423"}, + {file = "rapidfuzz-3.9.6-cp310-cp310-win_arm64.whl", hash = "sha256:e03fdf0e74f346ed7e798135df5f2a0fb8d6b96582b00ebef202dcf2171e1d1d"}, + {file = "rapidfuzz-3.9.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:52e4675f642fbc85632f691b67115a243cd4d2a47bdcc4a3d9a79e784518ff97"}, + {file = "rapidfuzz-3.9.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1f93a2f13038700bd245b927c46a2017db3dcd4d4ff94687d74b5123689b873b"}, + {file = "rapidfuzz-3.9.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42b70500bca460264b8141d8040caee22e9cf0418c5388104ff0c73fb69ee28f"}, + {file = "rapidfuzz-3.9.6-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1e037fb89f714a220f68f902fc6300ab7a33349f3ce8ffae668c3b3a40b0b06"}, + {file = "rapidfuzz-3.9.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6792f66d59b86ccfad5e247f2912e255c85c575789acdbad8e7f561412ffed8a"}, + {file = "rapidfuzz-3.9.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:68d9cffe710b67f1969cf996983608cee4490521d96ea91d16bd7ea5dc80ea98"}, + {file = "rapidfuzz-3.9.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63daaeeea76da17fa0bbe7fb05cba8ed8064bb1a0edf8360636557f8b6511961"}, + {file = "rapidfuzz-3.9.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:d214e063bffa13e3b771520b74f674b22d309b5720d4df9918ff3e0c0f037720"}, + {file = "rapidfuzz-3.9.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ed443a2062460f44c0346cb9d269b586496b808c2419bbd6057f54061c9b9c75"}, + {file = "rapidfuzz-3.9.6-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:5b0c9b227ee0076fb2d58301c505bb837a290ae99ee628beacdb719f0626d749"}, + {file = "rapidfuzz-3.9.6-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:82c9722b7dfaa71e8b61f8c89fed0482567fb69178e139fe4151fc71ed7df782"}, + {file = "rapidfuzz-3.9.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c18897c95c0a288347e29537b63608a8f63a5c3cb6da258ac46fcf89155e723e"}, + {file = "rapidfuzz-3.9.6-cp311-cp311-win32.whl", hash = "sha256:3e910cf08944da381159587709daaad9e59d8ff7bca1f788d15928f3c3d49c2a"}, + {file = "rapidfuzz-3.9.6-cp311-cp311-win_amd64.whl", hash = "sha256:59c4a61fab676d37329fc3a671618a461bfeef53a4d0b8b12e3bc24a14e166f8"}, + {file = "rapidfuzz-3.9.6-cp311-cp311-win_arm64.whl", hash = "sha256:8b4afea244102332973377fddbe54ce844d0916e1c67a5123432291717f32ffa"}, + {file = "rapidfuzz-3.9.6-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:70591b28b218fff351b88cdd7f2359a01a71f9f7f5a2e465ce3715ed4b3c422b"}, + {file = "rapidfuzz-3.9.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ee2d8355c7343c631a03e57540ea06e8717c19ecf5ff64ea07e0498f7f161457"}, + {file = "rapidfuzz-3.9.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:708fb675de0f47b9635d1cc6fbbf80d52cb710d0a1abbfae5c84c46e3abbddc3"}, + {file = "rapidfuzz-3.9.6-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d66c247c2d3bb7a9b60567c395a15a929d0ebcc5f4ceedb55bfa202c38c6e0c"}, + {file = "rapidfuzz-3.9.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:15146301b32e6e3d2b7e8146db1a26747919d8b13690c7f83a4cb5dc111b3a08"}, + {file = "rapidfuzz-3.9.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7a03da59b6c7c97e657dd5cd4bcaab5fe4a2affd8193958d6f4d938bee36679"}, + {file = "rapidfuzz-3.9.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d2c2fe19e392dbc22695b6c3b2510527e2b774647e79936bbde49db7742d6f1"}, + {file = "rapidfuzz-3.9.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:91aaee4c94cb45930684f583ffc4e7c01a52b46610971cede33586cf8a04a12e"}, + {file = "rapidfuzz-3.9.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3f5702828c10768f9281180a7ff8597da1e5002803e1304e9519dd0f06d79a85"}, + {file = "rapidfuzz-3.9.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ccd1763b608fb4629a0b08f00b3c099d6395e67c14e619f6341b2c8429c2f310"}, + {file = "rapidfuzz-3.9.6-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cc7a0d4b2cb166bc46d02c8c9f7551cde8e2f3c9789df3827309433ee9771163"}, + {file = "rapidfuzz-3.9.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7496f53d40560a58964207b52586783633f371683834a8f719d6d965d223a2eb"}, + {file = "rapidfuzz-3.9.6-cp312-cp312-win32.whl", hash = "sha256:5eb1a9272ca71bc72be5415c2fa8448a6302ea4578e181bb7da9db855b367df0"}, + {file = "rapidfuzz-3.9.6-cp312-cp312-win_amd64.whl", hash = "sha256:0d21fc3c0ca507a1180152a6dbd129ebaef48facde3f943db5c1055b6e6be56a"}, + {file = "rapidfuzz-3.9.6-cp312-cp312-win_arm64.whl", hash = "sha256:43bb27a57c29dc5fa754496ba6a1a508480d21ae99ac0d19597646c16407e9f3"}, + {file = "rapidfuzz-3.9.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:83a5ac6547a9d6eedaa212975cb8f2ce2aa07e6e30833b40e54a52b9f9999aa4"}, + {file = 
"rapidfuzz-3.9.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:10f06139142ecde67078ebc9a745965446132b998f9feebffd71acdf218acfcc"}, + {file = "rapidfuzz-3.9.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74720c3f24597f76c7c3e2c4abdff55f1664f4766ff5b28aeaa689f8ffba5fab"}, + {file = "rapidfuzz-3.9.6-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce2bce52b5c150878e558a0418c2b637fb3dbb6eb38e4eb27d24aa839920483e"}, + {file = "rapidfuzz-3.9.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1611199f178793ca9a060c99b284e11f6d7d124998191f1cace9a0245334d219"}, + {file = "rapidfuzz-3.9.6-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0308b2ad161daf502908a6e21a57c78ded0258eba9a8f5e2545e2dafca312507"}, + {file = "rapidfuzz-3.9.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3eda91832201b86e3b70835f91522587725bec329ec68f2f7faf5124091e5ca7"}, + {file = "rapidfuzz-3.9.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ece873c093aedd87fc07c2a7e333d52e458dc177016afa1edaf157e82b6914d8"}, + {file = "rapidfuzz-3.9.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d97d3c9d209d5c30172baea5966f2129e8a198fec4a1aeb2f92abb6e82a2edb1"}, + {file = "rapidfuzz-3.9.6-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:6c4550d0db4931f5ebe9f0678916d1b06f06f5a99ba0b8a48b9457fd8959a7d4"}, + {file = "rapidfuzz-3.9.6-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b6b8dd4af6324fc325d9483bec75ecf9be33e590928c9202d408e4eafff6a0a6"}, + {file = "rapidfuzz-3.9.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:16122ae448bc89e2bea9d81ce6cb0f751e4e07da39bd1e70b95cae2493857853"}, + {file = "rapidfuzz-3.9.6-cp313-cp313-win32.whl", hash = "sha256:71cc168c305a4445109cd0d4925406f6e66bcb48fde99a1835387c58af4ecfe9"}, + {file = "rapidfuzz-3.9.6-cp313-cp313-win_amd64.whl", hash = "sha256:59ee78f2ecd53fef8454909cda7400fe2cfcd820f62b8a5d4dfe930102268054"}, + {file = "rapidfuzz-3.9.6-cp313-cp313-win_arm64.whl", hash = "sha256:58b4ce83f223605c358ae37e7a2d19a41b96aa65b1fede99cc664c9053af89ac"}, + {file = "rapidfuzz-3.9.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9f469dbc9c4aeaac7dd005992af74b7dff94aa56a3ea063ce64e4b3e6736dd2f"}, + {file = "rapidfuzz-3.9.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a9ed7ad9adb68d0fe63a156fe752bbf5f1403ed66961551e749641af2874da92"}, + {file = "rapidfuzz-3.9.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39ffe48ffbeedf78d120ddfb9d583f2ca906712159a4e9c3c743c9f33e7b1775"}, + {file = "rapidfuzz-3.9.6-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8502ccdea9084d54b6f737d96a3b60a84e3afed9d016686dc979b49cdac71613"}, + {file = "rapidfuzz-3.9.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6a4bec4956e06b170ca896ba055d08d4c457dac745548172443982956a80e118"}, + {file = "rapidfuzz-3.9.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2c0488b1c273be39e109ff885ccac0448b2fa74dea4c4dc676bcf756c15f16d6"}, + {file = "rapidfuzz-3.9.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0542c036cb6acf24edd2c9e0411a67d7ba71e29e4d3001a082466b86fc34ff30"}, + {file = "rapidfuzz-3.9.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:0a96b52c9f26857bf009e270dcd829381e7a634f7ddd585fa29b87d4c82146d9"}, + {file = "rapidfuzz-3.9.6-cp38-cp38-musllinux_1_2_i686.whl", hash = 
"sha256:6edd3cd7c4aa8c68c716d349f531bd5011f2ca49ddade216bb4429460151559f"}, + {file = "rapidfuzz-3.9.6-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:50b2fb55d7ed58c66d49c9f954acd8fc4a3f0e9fd0ff708299bd8abb68238d0e"}, + {file = "rapidfuzz-3.9.6-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:32848dfe54391636b84cda1823fd23e5a6b1dbb8be0e9a1d80e4ee9903820994"}, + {file = "rapidfuzz-3.9.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:29146cb7a1bf69c87e928b31bffa54f066cb65639d073b36e1425f98cccdebc6"}, + {file = "rapidfuzz-3.9.6-cp38-cp38-win32.whl", hash = "sha256:aed13e5edacb0ecadcc304cc66e93e7e77ff24f059c9792ee602c0381808e10c"}, + {file = "rapidfuzz-3.9.6-cp38-cp38-win_amd64.whl", hash = "sha256:af440e36b828922256d0b4d79443bf2cbe5515fc4b0e9e96017ec789b36bb9fc"}, + {file = "rapidfuzz-3.9.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:efa674b407424553024522159296690d99d6e6b1192cafe99ca84592faff16b4"}, + {file = "rapidfuzz-3.9.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0b40ff76ee19b03ebf10a0a87938f86814996a822786c41c3312d251b7927849"}, + {file = "rapidfuzz-3.9.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16a6c7997cb5927ced6f617122eb116ba514ec6b6f60f4803e7925ef55158891"}, + {file = "rapidfuzz-3.9.6-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3f42504bdc8d770987fc3d99964766d42b2a03e4d5b0f891decdd256236bae0"}, + {file = "rapidfuzz-3.9.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9462aa2be9f60b540c19a083471fdf28e7cf6434f068b631525b5e6251b35e"}, + {file = "rapidfuzz-3.9.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1629698e68f47609a73bf9e73a6da3a4cac20bc710529215cbdf111ab603665b"}, + {file = "rapidfuzz-3.9.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68bc7621843d8e9a7fd1b1a32729465bf94b47b6fb307d906da168413331f8d6"}, + {file = "rapidfuzz-3.9.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c6254c50f15bc2fcc33cb93a95a81b702d9e6590f432a7f7822b8c7aba9ae288"}, + {file = "rapidfuzz-3.9.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:7e535a114fa575bc143e175e4ca386a467ec8c42909eff500f5f0f13dc84e3e0"}, + {file = "rapidfuzz-3.9.6-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:d50acc0e9d67e4ba7a004a14c42d1b1e8b6ca1c515692746f4f8e7948c673167"}, + {file = "rapidfuzz-3.9.6-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:fa742ec60bec53c5a211632cf1d31b9eb5a3c80f1371a46a23ac25a1fa2ab209"}, + {file = "rapidfuzz-3.9.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c256fa95d29cbe5aa717db790b231a9a5b49e5983d50dc9df29d364a1db5e35b"}, + {file = "rapidfuzz-3.9.6-cp39-cp39-win32.whl", hash = "sha256:89acbf728b764421036c173a10ada436ecca22999851cdc01d0aa904c70d362d"}, + {file = "rapidfuzz-3.9.6-cp39-cp39-win_amd64.whl", hash = "sha256:c608fcba8b14d86c04cb56b203fed31a96e8a1ebb4ce99e7b70313c5bf8cf497"}, + {file = "rapidfuzz-3.9.6-cp39-cp39-win_arm64.whl", hash = "sha256:d41c00ded0e22e9dba88ff23ebe0dc9d2a5f21ba2f88e185ea7374461e61daa9"}, + {file = "rapidfuzz-3.9.6-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:a65c2f63218ea2dedd56fc56361035e189ca123bd9c9ce63a9bef6f99540d681"}, + {file = "rapidfuzz-3.9.6-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:680dc78a5f889d3b89f74824b89fe357f49f88ad10d2c121e9c3ad37bac1e4eb"}, + {file = "rapidfuzz-3.9.6-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8ca862927a0b05bd825e46ddf82d0724ea44b07d898ef639386530bf9b40f15"}, + {file = 
"rapidfuzz-3.9.6-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2116fa1fbff21fa52cd46f3cfcb1e193ba1d65d81f8b6e123193451cd3d6c15e"}, + {file = "rapidfuzz-3.9.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4dcb7d9afd740370a897c15da61d3d57a8d54738d7c764a99cedb5f746d6a003"}, + {file = "rapidfuzz-3.9.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:1a5bd6401bb489e14cbb5981c378d53ede850b7cc84b2464cad606149cc4e17d"}, + {file = "rapidfuzz-3.9.6-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:29fda70b9d03e29df6fc45cc27cbcc235534b1b0b2900e0a3ae0b43022aaeef5"}, + {file = "rapidfuzz-3.9.6-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:88144f5f52ae977df9352029488326afadd7a7f42c6779d486d1f82d43b2b1f2"}, + {file = "rapidfuzz-3.9.6-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:715aeaabafba2709b9dd91acb2a44bad59d60b4616ef90c08f4d4402a3bbca60"}, + {file = "rapidfuzz-3.9.6-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:af26ebd3714224fbf9bebbc27bdbac14f334c15f5d7043699cd694635050d6ca"}, + {file = "rapidfuzz-3.9.6-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101bd2df438861a005ed47c032631b7857dfcdb17b82beeeb410307983aac61d"}, + {file = "rapidfuzz-3.9.6-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:2185e8e29809b97ad22a7f99281d1669a89bdf5fa1ef4ef1feca36924e675367"}, + {file = "rapidfuzz-3.9.6-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:9e53c72d08f0e9c6e4a369e52df5971f311305b4487690c62e8dd0846770260c"}, + {file = "rapidfuzz-3.9.6-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a0cb157162f0cdd62e538c7bd298ff669847fc43a96422811d5ab933f4c16c3a"}, + {file = "rapidfuzz-3.9.6-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bb5ff2bd48132ed5e7fbb8f619885facb2e023759f2519a448b2c18afe07e5d"}, + {file = "rapidfuzz-3.9.6-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6dc37f601865e8407e3a8037ffbc3afe0b0f837b2146f7632bd29d087385babe"}, + {file = "rapidfuzz-3.9.6-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a657eee4b94668faf1fa2703bdd803654303f7e468eb9ba10a664d867ed9e779"}, + {file = "rapidfuzz-3.9.6-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:51be6ab5b1d5bb32abd39718f2a5e3835502e026a8272d139ead295c224a6f5e"}, + {file = "rapidfuzz-3.9.6.tar.gz", hash = "sha256:5cf2a7d621e4515fee84722e93563bf77ff2cbe832a77a48b81f88f9e23b9e8d"}, ] [package.extras] @@ -3435,111 +3491,121 @@ files = [ [[package]] name = "safetensors" -version = "0.4.3" +version = "0.4.4" description = "" optional = false python-versions = ">=3.7" files = [ - {file = "safetensors-0.4.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:dcf5705cab159ce0130cd56057f5f3425023c407e170bca60b4868048bae64fd"}, - {file = "safetensors-0.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bb4f8c5d0358a31e9a08daeebb68f5e161cdd4018855426d3f0c23bb51087055"}, - {file = "safetensors-0.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70a5319ef409e7f88686a46607cbc3c428271069d8b770076feaf913664a07ac"}, - {file = "safetensors-0.4.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fb9c65bd82f9ef3ce4970dc19ee86be5f6f93d032159acf35e663c6bea02b237"}, - {file = "safetensors-0.4.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:edb5698a7bc282089f64c96c477846950358a46ede85a1c040e0230344fdde10"}, - {file = "safetensors-0.4.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:efcc860be094b8d19ac61b452ec635c7acb9afa77beb218b1d7784c6d41fe8ad"}, - {file = "safetensors-0.4.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d88b33980222085dd6001ae2cad87c6068e0991d4f5ccf44975d216db3b57376"}, - {file = "safetensors-0.4.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5fc6775529fb9f0ce2266edd3e5d3f10aab068e49f765e11f6f2a63b5367021d"}, - {file = "safetensors-0.4.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9c6ad011c1b4e3acff058d6b090f1da8e55a332fbf84695cf3100c649cc452d1"}, - {file = "safetensors-0.4.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8c496c5401c1b9c46d41a7688e8ff5b0310a3b9bae31ce0f0ae870e1ea2b8caf"}, - {file = "safetensors-0.4.3-cp310-none-win32.whl", hash = "sha256:38e2a8666178224a51cca61d3cb4c88704f696eac8f72a49a598a93bbd8a4af9"}, - {file = "safetensors-0.4.3-cp310-none-win_amd64.whl", hash = "sha256:393e6e391467d1b2b829c77e47d726f3b9b93630e6a045b1d1fca67dc78bf632"}, - {file = "safetensors-0.4.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:22f3b5d65e440cec0de8edaa672efa888030802e11c09b3d6203bff60ebff05a"}, - {file = "safetensors-0.4.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c4fa560ebd4522adddb71dcd25d09bf211b5634003f015a4b815b7647d62ebe"}, - {file = "safetensors-0.4.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9afd5358719f1b2cf425fad638fc3c887997d6782da317096877e5b15b2ce93"}, - {file = "safetensors-0.4.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d8c5093206ef4b198600ae484230402af6713dab1bd5b8e231905d754022bec7"}, - {file = "safetensors-0.4.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0b2104df1579d6ba9052c0ae0e3137c9698b2d85b0645507e6fd1813b70931a"}, - {file = "safetensors-0.4.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8cf18888606dad030455d18f6c381720e57fc6a4170ee1966adb7ebc98d4d6a3"}, - {file = "safetensors-0.4.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0bf4f9d6323d9f86eef5567eabd88f070691cf031d4c0df27a40d3b4aaee755b"}, - {file = "safetensors-0.4.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:585c9ae13a205807b63bef8a37994f30c917ff800ab8a1ca9c9b5d73024f97ee"}, - {file = "safetensors-0.4.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faefeb3b81bdfb4e5a55b9bbdf3d8d8753f65506e1d67d03f5c851a6c87150e9"}, - {file = "safetensors-0.4.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:befdf0167ad626f22f6aac6163477fcefa342224a22f11fdd05abb3995c1783c"}, - {file = "safetensors-0.4.3-cp311-none-win32.whl", hash = "sha256:a7cef55929dcbef24af3eb40bedec35d82c3c2fa46338bb13ecf3c5720af8a61"}, - {file = "safetensors-0.4.3-cp311-none-win_amd64.whl", hash = "sha256:840b7ac0eff5633e1d053cc9db12fdf56b566e9403b4950b2dc85393d9b88d67"}, - {file = "safetensors-0.4.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:22d21760dc6ebae42e9c058d75aa9907d9f35e38f896e3c69ba0e7b213033856"}, - {file = "safetensors-0.4.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d22c1a10dff3f64d0d68abb8298a3fd88ccff79f408a3e15b3e7f637ef5c980"}, - {file = "safetensors-0.4.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1648568667f820b8c48317c7006221dc40aced1869908c187f493838a1362bc"}, - {file = 
"safetensors-0.4.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:446e9fe52c051aeab12aac63d1017e0f68a02a92a027b901c4f8e931b24e5397"}, - {file = "safetensors-0.4.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fef5d70683643618244a4f5221053567ca3e77c2531e42ad48ae05fae909f542"}, - {file = "safetensors-0.4.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a1f4430cc0c9d6afa01214a4b3919d0a029637df8e09675ceef1ca3f0dfa0df"}, - {file = "safetensors-0.4.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d603846a8585b9432a0fd415db1d4c57c0f860eb4aea21f92559ff9902bae4d"}, - {file = "safetensors-0.4.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a844cdb5d7cbc22f5f16c7e2a0271170750763c4db08381b7f696dbd2c78a361"}, - {file = "safetensors-0.4.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:88887f69f7a00cf02b954cdc3034ffb383b2303bc0ab481d4716e2da51ddc10e"}, - {file = "safetensors-0.4.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ee463219d9ec6c2be1d331ab13a8e0cd50d2f32240a81d498266d77d07b7e71e"}, - {file = "safetensors-0.4.3-cp312-none-win32.whl", hash = "sha256:d0dd4a1db09db2dba0f94d15addc7e7cd3a7b0d393aa4c7518c39ae7374623c3"}, - {file = "safetensors-0.4.3-cp312-none-win_amd64.whl", hash = "sha256:d14d30c25897b2bf19b6fb5ff7e26cc40006ad53fd4a88244fdf26517d852dd7"}, - {file = "safetensors-0.4.3-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:d1456f814655b224d4bf6e7915c51ce74e389b413be791203092b7ff78c936dd"}, - {file = "safetensors-0.4.3-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:455d538aa1aae4a8b279344a08136d3f16334247907b18a5c3c7fa88ef0d3c46"}, - {file = "safetensors-0.4.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf476bca34e1340ee3294ef13e2c625833f83d096cfdf69a5342475602004f95"}, - {file = "safetensors-0.4.3-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:02ef3a24face643456020536591fbd3c717c5abaa2737ec428ccbbc86dffa7a4"}, - {file = "safetensors-0.4.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7de32d0d34b6623bb56ca278f90db081f85fb9c5d327e3c18fd23ac64f465768"}, - {file = "safetensors-0.4.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a0deb16a1d3ea90c244ceb42d2c6c276059616be21a19ac7101aa97da448faf"}, - {file = "safetensors-0.4.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c59d51f182c729f47e841510b70b967b0752039f79f1de23bcdd86462a9b09ee"}, - {file = "safetensors-0.4.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1f598b713cc1a4eb31d3b3203557ac308acf21c8f41104cdd74bf640c6e538e3"}, - {file = "safetensors-0.4.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5757e4688f20df083e233b47de43845d1adb7e17b6cf7da5f8444416fc53828d"}, - {file = "safetensors-0.4.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:fe746d03ed8d193674a26105e4f0fe6c726f5bb602ffc695b409eaf02f04763d"}, - {file = "safetensors-0.4.3-cp37-none-win32.whl", hash = "sha256:0d5ffc6a80f715c30af253e0e288ad1cd97a3d0086c9c87995e5093ebc075e50"}, - {file = "safetensors-0.4.3-cp37-none-win_amd64.whl", hash = "sha256:a11c374eb63a9c16c5ed146457241182f310902bd2a9c18255781bb832b6748b"}, - {file = "safetensors-0.4.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:b1e31be7945f66be23f4ec1682bb47faa3df34cb89fc68527de6554d3c4258a4"}, - {file = "safetensors-0.4.3-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:03a4447c784917c9bf01d8f2ac5080bc15c41692202cd5f406afba16629e84d6"}, - {file = "safetensors-0.4.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d244bcafeb1bc06d47cfee71727e775bca88a8efda77a13e7306aae3813fa7e4"}, - {file = "safetensors-0.4.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53c4879b9c6bd7cd25d114ee0ef95420e2812e676314300624594940a8d6a91f"}, - {file = "safetensors-0.4.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74707624b81f1b7f2b93f5619d4a9f00934d5948005a03f2c1845ffbfff42212"}, - {file = "safetensors-0.4.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d52c958dc210265157573f81d34adf54e255bc2b59ded6218500c9b15a750eb"}, - {file = "safetensors-0.4.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f9568f380f513a60139971169c4a358b8731509cc19112369902eddb33faa4d"}, - {file = "safetensors-0.4.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0d9cd8e1560dfc514b6d7859247dc6a86ad2f83151a62c577428d5102d872721"}, - {file = "safetensors-0.4.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:89f9f17b0dacb913ed87d57afbc8aad85ea42c1085bd5de2f20d83d13e9fc4b2"}, - {file = "safetensors-0.4.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:1139eb436fd201c133d03c81209d39ac57e129f5e74e34bb9ab60f8d9b726270"}, - {file = "safetensors-0.4.3-cp38-none-win32.whl", hash = "sha256:d9c289f140a9ae4853fc2236a2ffc9a9f2d5eae0cb673167e0f1b8c18c0961ac"}, - {file = "safetensors-0.4.3-cp38-none-win_amd64.whl", hash = "sha256:622afd28968ef3e9786562d352659a37de4481a4070f4ebac883f98c5836563e"}, - {file = "safetensors-0.4.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:8651c7299cbd8b4161a36cd6a322fa07d39cd23535b144d02f1c1972d0c62f3c"}, - {file = "safetensors-0.4.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e375d975159ac534c7161269de24ddcd490df2157b55c1a6eeace6cbb56903f0"}, - {file = "safetensors-0.4.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:084fc436e317f83f7071fc6a62ca1c513b2103db325cd09952914b50f51cf78f"}, - {file = "safetensors-0.4.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:41a727a7f5e6ad9f1db6951adee21bbdadc632363d79dc434876369a17de6ad6"}, - {file = "safetensors-0.4.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e7dbbde64b6c534548696808a0e01276d28ea5773bc9a2dfb97a88cd3dffe3df"}, - {file = "safetensors-0.4.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bbae3b4b9d997971431c346edbfe6e41e98424a097860ee872721e176040a893"}, - {file = "safetensors-0.4.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01e4b22e3284cd866edeabe4f4d896229495da457229408d2e1e4810c5187121"}, - {file = "safetensors-0.4.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dd37306546b58d3043eb044c8103a02792cc024b51d1dd16bd3dd1f334cb3ed"}, - {file = "safetensors-0.4.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d8815b5e1dac85fc534a97fd339e12404db557878c090f90442247e87c8aeaea"}, - {file = "safetensors-0.4.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e011cc162503c19f4b1fd63dfcddf73739c7a243a17dac09b78e57a00983ab35"}, - {file = "safetensors-0.4.3-cp39-none-win32.whl", hash = "sha256:01feb3089e5932d7e662eda77c3ecc389f97c0883c4a12b5cfdc32b589a811c3"}, - {file = "safetensors-0.4.3-cp39-none-win_amd64.whl", hash = "sha256:3f9cdca09052f585e62328c1c2923c70f46814715c795be65f0b93f57ec98a02"}, - 
{file = "safetensors-0.4.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1b89381517891a7bb7d1405d828b2bf5d75528299f8231e9346b8eba092227f9"}, - {file = "safetensors-0.4.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:cd6fff9e56df398abc5866b19a32124815b656613c1c5ec0f9350906fd798aac"}, - {file = "safetensors-0.4.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:840caf38d86aa7014fe37ade5d0d84e23dcfbc798b8078015831996ecbc206a3"}, - {file = "safetensors-0.4.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9650713b2cfa9537a2baf7dd9fee458b24a0aaaa6cafcea8bdd5fb2b8efdc34"}, - {file = "safetensors-0.4.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e4119532cd10dba04b423e0f86aecb96cfa5a602238c0aa012f70c3a40c44b50"}, - {file = "safetensors-0.4.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e066e8861eef6387b7c772344d1fe1f9a72800e04ee9a54239d460c400c72aab"}, - {file = "safetensors-0.4.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:90964917f5b0fa0fa07e9a051fbef100250c04d150b7026ccbf87a34a54012e0"}, - {file = "safetensors-0.4.3-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c41e1893d1206aa7054029681778d9a58b3529d4c807002c156d58426c225173"}, - {file = "safetensors-0.4.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae7613a119a71a497d012ccc83775c308b9c1dab454806291427f84397d852fd"}, - {file = "safetensors-0.4.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9bac020faba7f5dc481e881b14b6425265feabb5bfc552551d21189c0eddc3"}, - {file = "safetensors-0.4.3-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:420a98f593ff9930f5822560d14c395ccbc57342ddff3b463bc0b3d6b1951550"}, - {file = "safetensors-0.4.3-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f5e6883af9a68c0028f70a4c19d5a6ab6238a379be36ad300a22318316c00cb0"}, - {file = "safetensors-0.4.3-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:cdd0a3b5da66e7f377474599814dbf5cbf135ff059cc73694de129b58a5e8a2c"}, - {file = "safetensors-0.4.3-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9bfb92f82574d9e58401d79c70c716985dc049b635fef6eecbb024c79b2c46ad"}, - {file = "safetensors-0.4.3-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:3615a96dd2dcc30eb66d82bc76cda2565f4f7bfa89fcb0e31ba3cea8a1a9ecbb"}, - {file = "safetensors-0.4.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:868ad1b6fc41209ab6bd12f63923e8baeb1a086814cb2e81a65ed3d497e0cf8f"}, - {file = "safetensors-0.4.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7ffba80aa49bd09195145a7fd233a7781173b422eeb995096f2b30591639517"}, - {file = "safetensors-0.4.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c0acbe31340ab150423347e5b9cc595867d814244ac14218932a5cf1dd38eb39"}, - {file = "safetensors-0.4.3-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:19bbdf95de2cf64f25cd614c5236c8b06eb2cfa47cbf64311f4b5d80224623a3"}, - {file = "safetensors-0.4.3-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b852e47eb08475c2c1bd8131207b405793bfc20d6f45aff893d3baaad449ed14"}, - {file = "safetensors-0.4.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5d07cbca5b99babb692d76d8151bec46f461f8ad8daafbfd96b2fca40cadae65"}, - {file = "safetensors-0.4.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:1ab6527a20586d94291c96e00a668fa03f86189b8a9defa2cdd34a1a01acc7d5"}, - {file = "safetensors-0.4.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02318f01e332cc23ffb4f6716e05a492c5f18b1d13e343c49265149396284a44"}, - {file = "safetensors-0.4.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec4b52ce9a396260eb9731eb6aea41a7320de22ed73a1042c2230af0212758ce"}, - {file = "safetensors-0.4.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:018b691383026a2436a22b648873ed11444a364324e7088b99cd2503dd828400"}, - {file = "safetensors-0.4.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:309b10dbcab63269ecbf0e2ca10ce59223bb756ca5d431ce9c9eeabd446569da"}, - {file = "safetensors-0.4.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b277482120df46e27a58082df06a15aebda4481e30a1c21eefd0921ae7e03f65"}, - {file = "safetensors-0.4.3.tar.gz", hash = "sha256:2f85fc50c4e07a21e95c24e07460fe6f7e2859d0ce88092838352b798ce711c2"}, + {file = "safetensors-0.4.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2adb497ada13097f30e386e88c959c0fda855a5f6f98845710f5bb2c57e14f12"}, + {file = "safetensors-0.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7db7fdc2d71fd1444d85ca3f3d682ba2df7d61a637dfc6d80793f439eae264ab"}, + {file = "safetensors-0.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d4f0eed76b430f009fbefca1a0028ddb112891b03cb556d7440d5cd68eb89a9"}, + {file = "safetensors-0.4.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:57d216fab0b5c432aabf7170883d7c11671622bde8bd1436c46d633163a703f6"}, + {file = "safetensors-0.4.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7d9b76322e49c056bcc819f8bdca37a2daa5a6d42c07f30927b501088db03309"}, + {file = "safetensors-0.4.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:32f0d1f6243e90ee43bc6ee3e8c30ac5b09ca63f5dd35dbc985a1fc5208c451a"}, + {file = "safetensors-0.4.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44d464bdc384874601a177375028012a5f177f1505279f9456fea84bbc575c7f"}, + {file = "safetensors-0.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:63144e36209ad8e4e65384dbf2d52dd5b1866986079c00a72335402a38aacdc5"}, + {file = "safetensors-0.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:051d5ecd490af7245258000304b812825974d5e56f14a3ff7e1b8b2ba6dc2ed4"}, + {file = "safetensors-0.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:51bc8429d9376224cd3cf7e8ce4f208b4c930cd10e515b6ac6a72cbc3370f0d9"}, + {file = "safetensors-0.4.4-cp310-none-win32.whl", hash = "sha256:fb7b54830cee8cf9923d969e2df87ce20e625b1af2fd194222ab902d3adcc29c"}, + {file = "safetensors-0.4.4-cp310-none-win_amd64.whl", hash = "sha256:4b3e8aa8226d6560de8c2b9d5ff8555ea482599c670610758afdc97f3e021e9c"}, + {file = "safetensors-0.4.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:bbaa31f2cb49013818bde319232ccd72da62ee40f7d2aa532083eda5664e85ff"}, + {file = "safetensors-0.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9fdcb80f4e9fbb33b58e9bf95e7dbbedff505d1bcd1c05f7c7ce883632710006"}, + {file = "safetensors-0.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55c14c20be247b8a1aeaf3ab4476265e3ca83096bb8e09bb1a7aa806088def4f"}, + {file = "safetensors-0.4.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:949aaa1118660f992dbf0968487b3e3cfdad67f948658ab08c6b5762e90cc8b6"}, + {file = "safetensors-0.4.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c11a4ab7debc456326a2bac67f35ee0ac792bcf812c7562a4a28559a5c795e27"}, + {file = "safetensors-0.4.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0cea44bba5c5601b297bc8307e4075535b95163402e4906b2e9b82788a2a6df"}, + {file = "safetensors-0.4.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9d752c97f6bbe327352f76e5b86442d776abc789249fc5e72eacb49e6916482"}, + {file = "safetensors-0.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:03f2bb92e61b055ef6cc22883ad1ae898010a95730fa988c60a23800eb742c2c"}, + {file = "safetensors-0.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:87bf3f91a9328a941acc44eceffd4e1f5f89b030985b2966637e582157173b98"}, + {file = "safetensors-0.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:20d218ec2b6899d29d6895419a58b6e44cc5ff8f0cc29fac8d236a8978ab702e"}, + {file = "safetensors-0.4.4-cp311-none-win32.whl", hash = "sha256:8079486118919f600c603536e2490ca37b3dbd3280e3ad6eaacfe6264605ac8a"}, + {file = "safetensors-0.4.4-cp311-none-win_amd64.whl", hash = "sha256:2f8c2eb0615e2e64ee27d478c7c13f51e5329d7972d9e15528d3e4cfc4a08f0d"}, + {file = "safetensors-0.4.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:baec5675944b4a47749c93c01c73d826ef7d42d36ba8d0dba36336fa80c76426"}, + {file = "safetensors-0.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f15117b96866401825f3e94543145028a2947d19974429246ce59403f49e77c6"}, + {file = "safetensors-0.4.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a13a9caea485df164c51be4eb0c87f97f790b7c3213d635eba2314d959fe929"}, + {file = "safetensors-0.4.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6b54bc4ca5f9b9bba8cd4fb91c24b2446a86b5ae7f8975cf3b7a277353c3127c"}, + {file = "safetensors-0.4.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:08332c22e03b651c8eb7bf5fc2de90044f3672f43403b3d9ac7e7e0f4f76495e"}, + {file = "safetensors-0.4.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bb62841e839ee992c37bb75e75891c7f4904e772db3691c59daaca5b4ab960e1"}, + {file = "safetensors-0.4.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e5b927acc5f2f59547270b0309a46d983edc44be64e1ca27a7fcb0474d6cd67"}, + {file = "safetensors-0.4.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2a69c71b1ae98a8021a09a0b43363b0143b0ce74e7c0e83cacba691b62655fb8"}, + {file = "safetensors-0.4.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23654ad162c02a5636f0cd520a0310902c4421aab1d91a0b667722a4937cc445"}, + {file = "safetensors-0.4.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0677c109d949cf53756859160b955b2e75b0eefe952189c184d7be30ecf7e858"}, + {file = "safetensors-0.4.4-cp312-none-win32.whl", hash = "sha256:a51d0ddd4deb8871c6de15a772ef40b3dbd26a3c0451bb9e66bc76fc5a784e5b"}, + {file = "safetensors-0.4.4-cp312-none-win_amd64.whl", hash = "sha256:2d065059e75a798bc1933c293b68d04d79b586bb7f8c921e0ca1e82759d0dbb1"}, + {file = "safetensors-0.4.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:9d625692578dd40a112df30c02a1adf068027566abd8e6a74893bb13d441c150"}, + {file = "safetensors-0.4.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7cabcf39c81e5b988d0adefdaea2eb9b4fd9bd62d5ed6559988c62f36bfa9a89"}, + {file = 
"safetensors-0.4.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8359bef65f49d51476e9811d59c015f0ddae618ee0e44144f5595278c9f8268c"}, + {file = "safetensors-0.4.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1a32c662e7df9226fd850f054a3ead0e4213a96a70b5ce37b2d26ba27004e013"}, + {file = "safetensors-0.4.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c329a4dcc395364a1c0d2d1574d725fe81a840783dda64c31c5a60fc7d41472c"}, + {file = "safetensors-0.4.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:239ee093b1db877c9f8fe2d71331a97f3b9c7c0d3ab9f09c4851004a11f44b65"}, + {file = "safetensors-0.4.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd574145d930cf9405a64f9923600879a5ce51d9f315443a5f706374841327b6"}, + {file = "safetensors-0.4.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f6784eed29f9e036acb0b7769d9e78a0dc2c72c2d8ba7903005350d817e287a4"}, + {file = "safetensors-0.4.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:65a4a6072436bf0a4825b1c295d248cc17e5f4651e60ee62427a5bcaa8622a7a"}, + {file = "safetensors-0.4.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:df81e3407630de060ae8313da49509c3caa33b1a9415562284eaf3d0c7705f9f"}, + {file = "safetensors-0.4.4-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:e4a0f374200e8443d9746e947ebb346c40f83a3970e75a685ade0adbba5c48d9"}, + {file = "safetensors-0.4.4-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:181fb5f3dee78dae7fd7ec57d02e58f7936498d587c6b7c1c8049ef448c8d285"}, + {file = "safetensors-0.4.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb4ac1d8f6b65ec84ddfacd275079e89d9df7c92f95675ba96c4f790a64df6e"}, + {file = "safetensors-0.4.4-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:76897944cd9239e8a70955679b531b9a0619f76e25476e57ed373322d9c2075d"}, + {file = "safetensors-0.4.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a9e9d1a27e51a0f69e761a3d581c3af46729ec1c988fa1f839e04743026ae35"}, + {file = "safetensors-0.4.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:005ef9fc0f47cb9821c40793eb029f712e97278dae84de91cb2b4809b856685d"}, + {file = "safetensors-0.4.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26987dac3752688c696c77c3576f951dbbdb8c57f0957a41fb6f933cf84c0b62"}, + {file = "safetensors-0.4.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c05270b290acd8d249739f40d272a64dd597d5a4b90f27d830e538bc2549303c"}, + {file = "safetensors-0.4.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:068d3a33711fc4d93659c825a04480ff5a3854e1d78632cdc8f37fee917e8a60"}, + {file = "safetensors-0.4.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:063421ef08ca1021feea8b46951251b90ae91f899234dd78297cbe7c1db73b99"}, + {file = "safetensors-0.4.4-cp37-none-win32.whl", hash = "sha256:d52f5d0615ea83fd853d4e1d8acf93cc2e0223ad4568ba1e1f6ca72e94ea7b9d"}, + {file = "safetensors-0.4.4-cp37-none-win_amd64.whl", hash = "sha256:88a5ac3280232d4ed8e994cbc03b46a1807ce0aa123867b40c4a41f226c61f94"}, + {file = "safetensors-0.4.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:3467ab511bfe3360967d7dc53b49f272d59309e57a067dd2405b4d35e7dcf9dc"}, + {file = "safetensors-0.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2ab4c96d922e53670ce25fbb9b63d5ea972e244de4fa1dd97b590d9fd66aacef"}, + {file = 
"safetensors-0.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87df18fce4440477c3ef1fd7ae17c704a69a74a77e705a12be135ee0651a0c2d"}, + {file = "safetensors-0.4.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0e5fe345b2bc7d88587149ac11def1f629d2671c4c34f5df38aed0ba59dc37f8"}, + {file = "safetensors-0.4.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9f1a3e01dce3cd54060791e7e24588417c98b941baa5974700eeb0b8eb65b0a0"}, + {file = "safetensors-0.4.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c6bf35e9a8998d8339fd9a05ac4ce465a4d2a2956cc0d837b67c4642ed9e947"}, + {file = "safetensors-0.4.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:166c0c52f6488b8538b2a9f3fbc6aad61a7261e170698779b371e81b45f0440d"}, + {file = "safetensors-0.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:87e9903b8668a16ef02c08ba4ebc91e57a49c481e9b5866e31d798632805014b"}, + {file = "safetensors-0.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a9c421153aa23c323bd8483d4155b4eee82c9a50ac11cccd83539104a8279c64"}, + {file = "safetensors-0.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a4b8617499b2371c7353302c5116a7e0a3a12da66389ce53140e607d3bf7b3d3"}, + {file = "safetensors-0.4.4-cp38-none-win32.whl", hash = "sha256:c6280f5aeafa1731f0a3709463ab33d8e0624321593951aefada5472f0b313fd"}, + {file = "safetensors-0.4.4-cp38-none-win_amd64.whl", hash = "sha256:6ceed6247fc2d33b2a7b7d25d8a0fe645b68798856e0bc7a9800c5fd945eb80f"}, + {file = "safetensors-0.4.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:5cf6c6f6193797372adf50c91d0171743d16299491c75acad8650107dffa9269"}, + {file = "safetensors-0.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:419010156b914a3e5da4e4adf992bee050924d0fe423c4b329e523e2c14c3547"}, + {file = "safetensors-0.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88f6fd5a5c1302ce79993cc5feeadcc795a70f953c762544d01fb02b2db4ea33"}, + {file = "safetensors-0.4.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d468cffb82d90789696d5b4d8b6ab8843052cba58a15296691a7a3df55143cd2"}, + {file = "safetensors-0.4.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9353c2af2dd467333d4850a16edb66855e795561cd170685178f706c80d2c71e"}, + {file = "safetensors-0.4.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:83c155b4a33368d9b9c2543e78f2452090fb030c52401ca608ef16fa58c98353"}, + {file = "safetensors-0.4.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9850754c434e636ce3dc586f534bb23bcbd78940c304775bee9005bf610e98f1"}, + {file = "safetensors-0.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:275f500b4d26f67b6ec05629a4600645231bd75e4ed42087a7c1801bff04f4b3"}, + {file = "safetensors-0.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5c2308de665b7130cd0e40a2329278226e4cf083f7400c51ca7e19ccfb3886f3"}, + {file = "safetensors-0.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e06a9ebc8656e030ccfe44634f2a541b4b1801cd52e390a53ad8bacbd65f8518"}, + {file = "safetensors-0.4.4-cp39-none-win32.whl", hash = "sha256:ef73df487b7c14b477016947c92708c2d929e1dee2bacdd6fff5a82ed4539537"}, + {file = "safetensors-0.4.4-cp39-none-win_amd64.whl", hash = "sha256:83d054818a8d1198d8bd8bc3ea2aac112a2c19def2bf73758321976788706398"}, + {file = "safetensors-0.4.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:1d1f34c71371f0e034004a0b583284b45d233dd0b5f64a9125e16b8a01d15067"}, + {file = "safetensors-0.4.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1a8043a33d58bc9b30dfac90f75712134ca34733ec3d8267b1bd682afe7194f5"}, + {file = "safetensors-0.4.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8db8f0c59c84792c12661f8efa85de160f80efe16b87a9d5de91b93f9e0bce3c"}, + {file = "safetensors-0.4.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cfc1fc38e37630dd12d519bdec9dcd4b345aec9930bb9ce0ed04461f49e58b52"}, + {file = "safetensors-0.4.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e5c9d86d9b13b18aafa88303e2cd21e677f5da2a14c828d2c460fe513af2e9a5"}, + {file = "safetensors-0.4.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:43251d7f29a59120a26f5a0d9583b9e112999e500afabcfdcb91606d3c5c89e3"}, + {file = "safetensors-0.4.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:2c42e9b277513b81cf507e6121c7b432b3235f980cac04f39f435b7902857f91"}, + {file = "safetensors-0.4.4-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3daacc9a4e3f428a84dd56bf31f20b768eb0b204af891ed68e1f06db9edf546f"}, + {file = "safetensors-0.4.4-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:218bbb9b883596715fc9997bb42470bf9f21bb832c3b34c2bf744d6fa8f2bbba"}, + {file = "safetensors-0.4.4-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bd5efc26b39f7fc82d4ab1d86a7f0644c8e34f3699c33f85bfa9a717a030e1b"}, + {file = "safetensors-0.4.4-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:56ad9776b65d8743f86698a1973292c966cf3abff627efc44ed60e66cc538ddd"}, + {file = "safetensors-0.4.4-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:30f23e6253c5f43a809dea02dc28a9f5fa747735dc819f10c073fe1b605e97d4"}, + {file = "safetensors-0.4.4-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:5512078d00263de6cb04e9d26c9ae17611098f52357fea856213e38dc462f81f"}, + {file = "safetensors-0.4.4-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b96c3d9266439d17f35fc2173111d93afc1162f168e95aed122c1ca517b1f8f1"}, + {file = "safetensors-0.4.4-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:08d464aa72a9a13826946b4fb9094bb4b16554bbea2e069e20bd903289b6ced9"}, + {file = "safetensors-0.4.4-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:210160816d5a36cf41f48f38473b6f70d7bcb4b0527bedf0889cc0b4c3bb07db"}, + {file = "safetensors-0.4.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb276a53717f2bcfb6df0bcf284d8a12069002508d4c1ca715799226024ccd45"}, + {file = "safetensors-0.4.4-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a2c28c6487f17d8db0089e8b2cdc13de859366b94cc6cdc50e1b0a4147b56551"}, + {file = "safetensors-0.4.4-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:7915f0c60e4e6e65d90f136d85dd3b429ae9191c36b380e626064694563dbd9f"}, + {file = "safetensors-0.4.4-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:00eea99ae422fbfa0b46065acbc58b46bfafadfcec179d4b4a32d5c45006af6c"}, + {file = "safetensors-0.4.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bb1ed4fcb0b3c2f3ea2c5767434622fe5d660e5752f21ac2e8d737b1e5e480bb"}, + {file = "safetensors-0.4.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:73fc9a0a4343188bdb421783e600bfaf81d0793cd4cce6bafb3c2ed567a74cd5"}, + {file = 
"safetensors-0.4.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c37e6b714200824c73ca6eaf007382de76f39466a46e97558b8dc4cf643cfbf"}, + {file = "safetensors-0.4.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f75698c5c5c542417ac4956acfc420f7d4a2396adca63a015fd66641ea751759"}, + {file = "safetensors-0.4.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ca1a209157f242eb183e209040097118472e169f2e069bfbd40c303e24866543"}, + {file = "safetensors-0.4.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:177f2b60a058f92a3cec7a1786c9106c29eca8987ecdfb79ee88126e5f47fa31"}, + {file = "safetensors-0.4.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ee9622e84fe6e4cd4f020e5fda70d6206feff3157731df7151d457fdae18e541"}, + {file = "safetensors-0.4.4.tar.gz", hash = "sha256:5fe3e9b705250d0172ed4e100a811543108653fb2b66b9e702a088ad03772a07"}, ] [package.extras] @@ -3621,24 +3687,24 @@ files = [ [[package]] name = "soupsieve" -version = "2.5" +version = "2.6" description = "A modern CSS selector implementation for Beautiful Soup." optional = false python-versions = ">=3.8" files = [ - {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"}, - {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, + {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"}, + {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, ] [[package]] name = "sympy" -version = "1.13.1" +version = "1.13.2" description = "Computer algebra system (CAS) in Python" optional = false python-versions = ">=3.8" files = [ - {file = "sympy-1.13.1-py3-none-any.whl", hash = "sha256:db36cdc64bf61b9b24578b6f7bab1ecdd2452cf008f34faa33776680c26d66f8"}, - {file = "sympy-1.13.1.tar.gz", hash = "sha256:9cebf7e04ff162015ce31c9c6c9144daa34a93bd082f54fd8f12deca4f47515f"}, + {file = "sympy-1.13.2-py3-none-any.whl", hash = "sha256:c51d75517712f1aed280d4ce58506a4a88d635d6b5dd48b39102a7ae1f3fcfe9"}, + {file = "sympy-1.13.2.tar.gz", hash = "sha256:401449d84d07be9d0c7a46a64bd54fe097667d5e7181bfe67ec777be9e01cb13"}, ] [package.dependencies] @@ -3678,13 +3744,13 @@ test = ["pytest", "tornado (>=4.5)", "typeguard"] [[package]] name = "timm" -version = "1.0.7" +version = "1.0.8" description = "PyTorch Image Models" optional = false python-versions = ">=3.8" files = [ - {file = "timm-1.0.7-py3-none-any.whl", hash = "sha256:942ced65b47b5ec12b8df07eb8ee929f1bb310402155b28931ab7a85ecc1cef2"}, - {file = "timm-1.0.7.tar.gz", hash = "sha256:d1d26d906b5e188d7e7d536a6a0999568bb184f884f9a334c48d46fc6dc166c8"}, + {file = "timm-1.0.8-py3-none-any.whl", hash = "sha256:2e4cf9e2224616fdb08e5f7a2972bd20e05f750236ea1f8dd53f3f326ceaee83"}, + {file = "timm-1.0.8.tar.gz", hash = "sha256:f54a579f1cc39c43d99a4b03603e39c4cee87d4f0a08aba9c22e19064b30bf95"}, ] [package.dependencies] @@ -3920,13 +3986,13 @@ scipy = ["scipy"] [[package]] name = "tqdm" -version = "4.66.4" +version = "4.66.5" description = "Fast, Extensible Progress Meter" optional = false python-versions = ">=3.7" files = [ - {file = "tqdm-4.66.4-py3-none-any.whl", hash = "sha256:b75ca56b413b030bc3f00af51fd2c1a1a5eac6a0c1cca83cbb37a5c52abce644"}, - {file = "tqdm-4.66.4.tar.gz", hash = "sha256:e4d936c9de8727928f3be6079590e97d9abfe8d39a590be678eb5919ffc186bb"}, + 
{file = "tqdm-4.66.5-py3-none-any.whl", hash = "sha256:90279a3770753eafc9194a0364852159802111925aa30eb3f9d85b0e805ac7cd"}, + {file = "tqdm-4.66.5.tar.gz", hash = "sha256:e1020aef2e5096702d8a025ac7d16b1577279c9d63f8375b63083e9a5f0fcbad"}, ] [package.dependencies] @@ -3940,13 +4006,13 @@ telegram = ["requests"] [[package]] name = "transformers" -version = "4.43.2" +version = "4.44.1" description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow" optional = false python-versions = ">=3.8.0" files = [ - {file = "transformers-4.43.2-py3-none-any.whl", hash = "sha256:283c8b47cf38640c5c0caea60be0dfa948669fa48e9739b03717cbf5e8b20f11"}, - {file = "transformers-4.43.2.tar.gz", hash = "sha256:99dbbdeef9d451cdbc1c5316dce3af3dd5bb56b6cda5d0c564253e8fa7ccaf02"}, + {file = "transformers-4.44.1-py3-none-any.whl", hash = "sha256:bd2642da18b4e6d29b135c17650cd7ca8e874f2d092d2eddd3ed6b71a93a155c"}, + {file = "transformers-4.44.1.tar.gz", hash = "sha256:3b9a1a07ca65c665c7bf6109b7da76182184d10bb58d9ab14e6892e7b9e073a2"}, ] [package.dependencies] @@ -3969,10 +4035,10 @@ audio = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] benchmark = ["optimum-benchmark (>=0.2.0)"] codecarbon = ["codecarbon (==1.2.0)"] deepspeed = ["accelerate (>=0.21.0)", "deepspeed (>=0.9.3)"] -deepspeed-testing = ["GitPython (<3.1.19)", "accelerate (>=0.21.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "deepspeed (>=0.9.3)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "nltk", "optuna", "parameterized", "protobuf", "psutil", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.4.4)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "timeout-decorator"] -dev = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "av (==9.2.0)", "beautifulsoup4", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "decord (==0.6.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flax (>=0.4.1,<=0.7.0)", "fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1,<0.14.0)", "librosa", "nltk", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.4.4)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "scipy (<1.13.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "timm (<=0.9.16)", "tokenizers (>=0.19,<0.20)", "torch", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] -dev-tensorflow = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "isort (>=5.5.4)", "kenlm", "keras-nlp (>=0.3.1,<0.14.0)", "librosa", "nltk", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest 
(>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.4.4)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "tokenizers (>=0.19,<0.20)", "urllib3 (<2.0.0)"] -dev-torch = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "beautifulsoup4", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "kenlm", "librosa", "nltk", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.4.4)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "timeout-decorator", "timm (<=0.9.16)", "tokenizers (>=0.19,<0.20)", "torch", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] +deepspeed-testing = ["GitPython (<3.1.19)", "accelerate (>=0.21.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "deepspeed (>=0.9.3)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "nltk", "optuna", "parameterized", "protobuf", "psutil", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.5.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "timeout-decorator"] +dev = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "av (==9.2.0)", "beautifulsoup4", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "decord (==0.6.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flax (>=0.4.1,<=0.7.0)", "fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1,<0.14.0)", "librosa", "nltk", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.5.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "scipy (<1.13.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "timm (<=0.9.16)", "tokenizers (>=0.19,<0.20)", "torch", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] +dev-tensorflow = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "isort (>=5.5.4)", "kenlm", "keras-nlp (>=0.3.1,<0.14.0)", "librosa", "nltk", "onnxconverter-common", "onnxruntime (>=1.4.0)", 
"onnxruntime-tools (>=1.4.2)", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.5.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "tokenizers (>=0.19,<0.20)", "urllib3 (<2.0.0)"] +dev-torch = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "beautifulsoup4", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "kenlm", "librosa", "nltk", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.5.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "timeout-decorator", "timm (<=0.9.16)", "tokenizers (>=0.19,<0.20)", "torch", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] flax = ["flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "optax (>=0.0.8,<=0.1.4)", "scipy (<1.13.0)"] flax-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] ftfy = ["ftfy"] @@ -3983,17 +4049,17 @@ natten = ["natten (>=0.14.6,<0.15.0)"] onnx = ["onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "tf2onnx"] onnxruntime = ["onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)"] optuna = ["optuna"] -quality = ["GitPython (<3.1.19)", "datasets (!=2.5.0)", "isort (>=5.5.4)", "ruff (==0.4.4)", "urllib3 (<2.0.0)"] +quality = ["GitPython (<3.1.19)", "datasets (!=2.5.0)", "isort (>=5.5.4)", "ruff (==0.5.1)", "urllib3 (<2.0.0)"] ray = ["ray[tune] (>=2.7.0)"] retrieval = ["datasets (!=2.5.0)", "faiss-cpu"] -ruff = ["ruff (==0.4.4)"] +ruff = ["ruff (==0.5.1)"] sagemaker = ["sagemaker (>=2.31.0)"] sentencepiece = ["protobuf", "sentencepiece (>=0.1.91,!=0.1.92)"] serving = ["fastapi", "pydantic", "starlette", "uvicorn"] sigopt = ["sigopt"] sklearn = ["scikit-learn"] speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] -testing = ["GitPython (<3.1.19)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "nltk", "parameterized", "psutil", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.4.4)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "timeout-decorator"] +testing = ["GitPython (<3.1.19)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "nltk", "parameterized", "psutil", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.5.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", 
"sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "timeout-decorator"] tf = ["keras-nlp (>=0.3.1,<0.14.0)", "onnxconverter-common", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx"] tf-cpu = ["keras (>2.9,<2.16)", "keras-nlp (>=0.3.1,<0.14.0)", "onnxconverter-common", "tensorflow-cpu (>2.9,<2.16)", "tensorflow-probability (<0.24)", "tensorflow-text (<2.16)", "tf2onnx"] tf-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] @@ -4249,13 +4315,13 @@ transformers = ">=4.25.1" [[package]] name = "unstructured-pytesseract" -version = "0.3.12" +version = "0.3.13" description = "Python-tesseract is a python wrapper for Google's Tesseract-OCR" optional = false python-versions = ">=3.8" files = [ - {file = "unstructured.pytesseract-0.3.12-py3-none-any.whl", hash = "sha256:6ed42530fc697bb08d1ae4884cc517ee808620c1c1414efe8d5d90334da068d3"}, - {file = "unstructured.pytesseract-0.3.12.tar.gz", hash = "sha256:751a21d67b1f109036bf4daf796d3e04631697a355efd650f3373412b249de2e"}, + {file = "unstructured.pytesseract-0.3.13-py3-none-any.whl", hash = "sha256:8001bc860470d56185176eb3ceb4623e888eba058ca3b30af79003784bc40e19"}, + {file = "unstructured.pytesseract-0.3.13.tar.gz", hash = "sha256:ff2e6391496e457dbf4b4e327f4a4577cce18921ea6570dc74bd64381b10e963"}, ] [package.dependencies] @@ -4387,13 +4453,13 @@ files = [ [[package]] name = "zipp" -version = "3.19.2" +version = "3.20.0" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"}, - {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"}, + {file = "zipp-3.20.0-py3-none-any.whl", hash = "sha256:58da6168be89f0be59beb194da1250516fdaa062ccebd30127ac65d30045e10d"}, + {file = "zipp-3.20.0.tar.gz", hash = "sha256:0145e43d89664cfe1a2e533adc75adafed82fe2da404b4bbb6b026c0157bdb31"}, ] [package.extras] diff --git a/libs/partners/unstructured/pyproject.toml b/libs/partners/unstructured/pyproject.toml index 38f360549c2df..c8c95b62a092c 100644 --- a/libs/partners/unstructured/pyproject.toml +++ b/libs/partners/unstructured/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "langchain-unstructured" -version = "0.1.1" +version = "0.1.2" description = "An integration package connecting Unstructured and LangChain" authors = [] readme = "README.md" From 9daff60698695417a0e7e11e634d5b38639c0c04 Mon Sep 17 00:00:00 2001 From: Bagatur <22008038+baskaryan@users.noreply.github.com> Date: Wed, 21 Aug 2024 12:55:17 -0700 Subject: [PATCH 47/80] docs: fix openai api ref (#25639) --- docs/api_reference/guide_imports.json | 2 +- .../langchain_openai/chat_models/base.py | 47 +++++++++---------- 2 files changed, 24 insertions(+), 25 deletions(-) diff --git a/docs/api_reference/guide_imports.json b/docs/api_reference/guide_imports.json index ed408b974c657..7c312af840f4a 100644 --- a/docs/api_reference/guide_imports.json +++ b/docs/api_reference/guide_imports.json @@ -1 +1 @@ -{"ChatPromptTemplate": {"\ud83e\udd9c\ufe0f\ud83c\udfd3 LangServe": "https://python.langchain.com/v0.2/docs/langserve/", "\ud83e\udd9c\ud83d\udd78\ufe0fLangGraph": "https://python.langchain.com/v0.2/docs/langgraph/", "This uses some example data": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/imessage/", "Wait for the fine-tuning to complete (this may take some time)": 
"https://python.langchain.com/v0.2/docs/integrations/chat_loaders/langsmith_llm_runs/", "del os.environ['NVIDIA_API_KEY'] ## delete key and reset": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/nvidia_ai_endpoints/", "For use in Chaining section": "https://python.langchain.com/v0.2/docs/integrations/retrievers/you-retriever/", "fleet_context.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/fleet_context/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/tool_error_handling/", "# activeloop token is needed if you are not signed in using CLI: `activeloop login -u -p `": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "ragatouille.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/ragatouille/", "redis_chat_message_history.md": "https://python.langchain.com/v0.2/docs/integrations/memory/redis_chat_message_history/", "# Automatically restart kernel after installs so that your environment can access the new packages": "https://python.langchain.com/v0.2/docs/integrations/memory/google_sql_mssql/", "Optionally, specify your own session_state key for storing messages": "https://python.langchain.com/v0.2/docs/integrations/memory/streamlit_chat_message_history/", "copy from tidb cloud console": "https://python.langchain.com/v0.2/docs/integrations/memory/tidb_chat_message_history/", "Install Langchain community and core packages": "https://python.langchain.com/v0.2/docs/integrations/chat/kinetica/", "open ../../../static/img/brand/wordmark.png as base64 str": "https://python.langchain.com/v0.2/docs/integrations/chat/anthropic/", "groq.md": "https://python.langchain.com/v0.2/docs/integrations/chat/groq/", "openai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/openai/", "for running these examples in the notebook:": "https://python.langchain.com/v0.2/docs/integrations/chat/google_vertex_ai_palm/", "get a chat completion from the formatted messages": "https://python.langchain.com/v0.2/docs/integrations/chat/vllm/", "LangChain supports many other chat models. Here, we're using Ollama": "https://python.langchain.com/v0.2/docs/integrations/chat/ollama/", "If api_key is not passed, default behavior is to use the `MISTRAL_API_KEY` environment variable.": "https://python.langchain.com/v0.2/docs/integrations/chat/mistralai/", "ai21.md": "https://python.langchain.com/v0.2/docs/integrations/chat/ai21/", "!pip3 install text-generation": "https://python.langchain.com/v0.2/docs/integrations/chat/llama2_chat/", "edenai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/edenai/", "yuan2.md": "https://python.langchain.com/v0.2/docs/integrations/chat/yuan2/", "Loading the COMVEST 2024 notice": "https://python.langchain.com/v0.2/docs/integrations/chat/maritalk/", "perplexity.md": "https://python.langchain.com/v0.2/docs/integrations/chat/perplexity/", "using chat invoke": "https://python.langchain.com/v0.2/docs/integrations/chat/upstage/", "Or via the async API": "https://python.langchain.com/v0.2/docs/integrations/chat/nvidia_ai_endpoints/", "context.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/context/", "Fiddler project and model names, used for model registration": "https://python.langchain.com/v0.2/docs/integrations/callbacks/fiddler/", "1. 
Vanilla RAG {#vanilla-rag-1}": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "Cohere": "https://python.langchain.com/v0.2/docs/integrations/providers/cohere/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_summary/", "sql_database.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/sql_database/", "install package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/weaviate/", "Install all needed libraries": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/yellowbrick/", "cosine: distance metric": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/jaguar/", "astradb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/astradb/", "cassandra.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/cassandra/", "Must be an OpenAI model that supports functions": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/openai_metadata_tagger/", "see https://python.langchain.com/en/latest/modules/data_connection/getting_started.html for more details": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/figma/", "Quickstart": "https://python.langchain.com/v0.2/docs/modules/model_io/.ipynb_checkpoints/quick_start-checkpoint/", "Note that we set max_retries = 0 to avoid retrying on RateLimits, etc": "https://python.langchain.com/v0.2/docs/guides/productionization/fallbacks/", "moderation.md": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/moderation/", "QA with private data protection {#qa-with-private-data-protection}": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/qa_privacy_protection/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/chatbots/memory_management/", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/v0.2/docs/use_cases/code_understanding/", "If you'd like to use LangSmith, uncomment the below:": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/prompting/", "This will only get documents for Ankush": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/per_user/", "import dotenv": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/streaming/", "Uncomment if you want to log to LangSmith": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/citations/", "%pip install -qU langchain langchain-community langchain-openai youtube-transcript-api pytube langchain-chroma": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/quickstart/", "%pip install -qU langchain langchain-community langchain-openai langchain-chroma": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/no_queries/", "%pip install -qU langchain-core langchain-openai": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/techniques/step_back/", "%pip install -qU langchain langchain-community langchain-openai faker langchain-chroma": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/high_cardinality/", "%pip install -qU langchain langchain-openai": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/techniques/hyde/", "Optional, uncomment to trace runs with LangSmith. 
Sign up here: https://smith.langchain.com.": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/techniques/routing/", "%pip install -qU langchain langchain-openai youtube-transcript-api pytube": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/techniques/structuring/", "Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/sql/query_checking/", "Install a model capable of tool calling": "https://python.langchain.com/v0.2/docs/use_cases/extraction/quickstart/", "Define a custom prompt to provide instructions and any additional context.": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/examples/", "Download the content": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/handle_long_text/", "Set up a parser": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/parse/", "Using LangSmith is recommended but not required. Uncomment below lines to use.": "https://python.langchain.com/v0.2/docs/use_cases/sql/csv/", "os.environ[\"OPENAI_API_KEY\"] = getpass.getpass()": "https://python.langchain.com/v0.2/docs/use_cases/sql/large_db/", "Retrievers": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/index/", "The vectorstore to use to index the child chunks": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/multi_vector/", "[Beta] Memory": "https://python.langchain.com/v0.2/docs/modules/memory/.ipynb_checkpoints/index-checkpoint/", "adding_memory.md": "https://python.langchain.com/v0.2/docs/modules/memory/adding_memory/", "Construct the Tools agent": "https://python.langchain.com/v0.2/docs/modules/agents/agent_types/tool_calling/", "Load in document to retrieve over": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/agent_structured/", "Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/streaming/", "custom_agent.md": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/custom_agent/", "Model I/O": "https://python.langchain.com/v0.2/docs/modules/model_io/index/", "!pip install -qU langchain-openai": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/token_usage_tracking/", "Tool calling {#tool-calling}": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/function_calling/", "Prompts": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/.ipynb_checkpoints/prompts-checkpoint/", "openai_functions.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/openai_functions/", "openai_tools.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/openai_tools/", "This is a prompt template used to format each individual example.": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/few_shot_examples_chat/", "Quick reference {#quick-reference}": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/quick_start/", "Prompt templates": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/prompt_templates/.ipynb_checkpoints/index-checkpoint/", "custom_llm.md": "https://python.langchain.com/v0.2/docs/modules/model_io/llms/custom_llm/", "Streaming With LangChain {#streaming-with-langchain}": "https://python.langchain.com/v0.2/docs/expression_language/streaming/", "batch_configurable_chain([\"ice cream\", \"spaghetti\", \"dumplings\"])": "https://python.langchain.com/v0.2/docs/expression_language/why/", "> 
ChatPromptValue(messages=[HumanMessage(content='tell me a short joke about ice cream')])": "https://python.langchain.com/v0.2/docs/expression_language/get_started/", "The input schema of the chain is the input schema of its first part, the prompt.": "https://python.langchain.com/v0.2/docs/expression_language/interface/", "prompt_llm_parser.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/prompt_llm_parser/", "prompt_size.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/prompt_size/", "multiple_chains.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/multiple_chains/", "code_writing.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/code_writing/", "inspect.md": "https://python.langchain.com/v0.2/docs/expression_language/how_to/inspect/", "Remembers": "https://python.langchain.com/v0.2/docs/expression_language/how_to/message_history/", "decorator.md": "https://python.langchain.com/v0.2/docs/expression_language/how_to/decorator/", "Adding values to chain state {#adding-values-to-chain-state}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/assign/", "Binding: Attach runtime args {#binding-attach-runtime-args}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/binding/", "Formatting inputs & output {#formatting-inputs-output}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/parallel/", "Passing data through {#passing-data-through}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/passthrough/", "Run custom functions {#run-custom-functions}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/functions/", "Chaining runnables {#chaining-runnables}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/sequence/"}, "ChatAnthropic": {"\ud83e\udd9c\ufe0f\ud83c\udfd3 LangServe": "https://python.langchain.com/v0.2/docs/langserve/", "open ../../../static/img/brand/wordmark.png as base64 str": "https://python.langchain.com/v0.2/docs/integrations/chat/anthropic/", "Log10": "https://python.langchain.com/v0.2/docs/integrations/providers/log10/", "Define the neural network": "https://python.langchain.com/v0.2/docs/integrations/toolkits/python/", "If this is your first time using playwright, you'll have to install a browser executable.": "https://python.langchain.com/v0.2/docs/integrations/toolkits/playwright/", "Quickstart": "https://python.langchain.com/v0.2/docs/modules/model_io/quick_start/", "Note that we set max_retries = 0 to avoid retrying on RateLimits, etc": "https://python.langchain.com/v0.2/docs/guides/productionization/fallbacks/", "ANTHROPIC_API_KEY=": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/trajectory/trajectory_eval/", "The prompt was assigned to the evaluator": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/comparison/pairwise_string/", "This is equivalent to loading using the enum": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/string/criteria_eval_chain/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/multiple_tools/", "Set up a parser": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/parse/", "Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/agent_types/xml_agent/", "Model I/O": 
"https://python.langchain.com/v0.2/docs/modules/model_io/index/", "!pip install -qU langchain-openai": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/token_usage_tracking/", "streaming.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/streaming/", "structured_output.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/structured_output/", "response_metadata.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/response_metadata/", "The [bool] desribes a parameterization of a generic.": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/custom/", "xml.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/xml/", "This is a prompt template used to format each individual example.": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/few_shot_examples_chat/", "Streaming With LangChain {#streaming-with-langchain}": "https://python.langchain.com/v0.2/docs/expression_language/streaming/", "batch_configurable_chain([\"ice cream\", \"spaghetti\", \"dumplings\"])": "https://python.langchain.com/v0.2/docs/expression_language/why/", "Dynamically route logic based on input {#dynamically-route-logic-based-on-input}": "https://python.langchain.com/v0.2/docs/expression_language/how_to/routing/", "Configure chain internals at runtime {#configure-chain-internals-at-runtime}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/configure/", "Chaining runnables {#chaining-runnables}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/sequence/"}, "ChatOpenAI": {"\ud83e\udd9c\ufe0f\ud83c\udfd3 LangServe": "https://python.langchain.com/v0.2/docs/langserve/", "\ud83e\udd9c\ud83d\udd78\ufe0fLangGraph": "https://python.langchain.com/v0.2/docs/langgraph/", "Used by the agent in this tutorial": "https://python.langchain.com/v0.2/docs/langsmith/walkthrough/", "Merge consecutive messages from the same sender into a single message": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/discord/", "This uses some example data": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/imessage/", "Wait for the fine-tuning to complete (this may take some time)": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/langsmith_dataset/", "re_phrase.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/re_phrase/", "For use in Chaining section": "https://python.langchain.com/v0.2/docs/integrations/tools/you/", "fleet_context.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/fleet_context/", "Helper function for printing docs": "https://python.langchain.com/v0.2/docs/integrations/retrievers/llmlingua/", "outline.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/outline/", "get a token: https://platform.openai.com/account/api-keys": "https://python.langchain.com/v0.2/docs/integrations/retrievers/arxiv/", "Setup API keys for Kay and OpenAI": "https://python.langchain.com/v0.2/docs/integrations/retrievers/sec_filings/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/integrations/toolkits/polygon/", "# activeloop token is needed if you are not signed in using CLI: `activeloop login -u -p `": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "ragatouille.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/ragatouille/", "Setup API key": 
"https://python.langchain.com/v0.2/docs/integrations/retrievers/kay/", "OR (depending on Python version)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/flashrank-reranker/", "This example only specifies a relevant query": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/tencentvectordb/", "OpenAI": "https://python.langchain.com/v0.2/docs/integrations/platforms/openai/", "chatgpt_plugins.md": "https://python.langchain.com/v0.2/docs/integrations/tools/chatgpt_plugins/", "Specify your Connery Runner credentials.": "https://python.langchain.com/v0.2/docs/integrations/toolkits/connery/", "How to use it inside an Agent {#how-to-use-it-inside-an-agent}": "https://python.langchain.com/v0.2/docs/integrations/tools/infobip/", "Artifacts are charts created by matplotlib when `plt.show()` is called": "https://python.langchain.com/v0.2/docs/integrations/tools/e2b_data_analysis/", "Answer with 'Zhu'": "https://python.langchain.com/v0.2/docs/integrations/tools/human_tools/", "How YahooFinanceNewsTool works? {#how-yahoofinancenewstool-works}": "https://python.langchain.com/v0.2/docs/integrations/tools/yahoo_finance_news/", "start by installing semanticscholar api": "https://python.langchain.com/v0.2/docs/integrations/tools/semanticscholar/", "Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/max_iterations/", "Extract pdf content": "https://python.langchain.com/v0.2/docs/integrations/tools/bearly/", "arxiv.md": "https://python.langchain.com/v0.2/docs/integrations/tools/arxiv/", "and some deps for this notebook": "https://python.langchain.com/v0.2/docs/integrations/tools/exa_search/", "Adapted code from /docs/modules/agents/how_to/sharedmemory_for_tools": "https://python.langchain.com/v0.2/docs/integrations/tools/reddit_search/", "bash.md": "https://python.langchain.com/v0.2/docs/integrations/tools/bash/", "redis_chat_message_history.md": "https://python.langchain.com/v0.2/docs/integrations/memory/redis_chat_message_history/", "xata_chat_message_history.md": "https://python.langchain.com/v0.2/docs/integrations/memory/xata_chat_message_history/", "Remembrall": "https://python.langchain.com/v0.2/docs/integrations/memory/remembrall/", "Optionally, specify your own session_state key for storing messages": "https://python.langchain.com/v0.2/docs/integrations/memory/streamlit_chat_message_history/", "copy from tidb cloud console": "https://python.langchain.com/v0.2/docs/integrations/memory/tidb_chat_message_history/", "openai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/openai/", "get a chat completion from the formatted messages": "https://python.langchain.com/v0.2/docs/integrations/chat/vllm/", "LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/callbacks/llmonitor/", "context.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/context/", "labelstudio.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/labelstudio/", "promptlayer.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/promptlayer/", "1. 
Vanilla RAG {#vanilla-rag-1}": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "trubrics.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/trubrics/", "Install necessary dependencies.": "https://python.langchain.com/v0.2/docs/integrations/callbacks/infino/", "CnosDB": "https://python.langchain.com/v0.2/docs/integrations/providers/cnosdb/", "Log10": "https://python.langchain.com/v0.2/docs/integrations/providers/log10/", "Flyte": "https://python.langchain.com/v0.2/docs/integrations/providers/flyte/", "arthur_tracking.md": "https://python.langchain.com/v0.2/docs/integrations/providers/arthur_tracking/", "Dataherald": "https://python.langchain.com/v0.2/docs/integrations/providers/.ipynb_checkpoints/dataherald-checkpoint/", "Construct the OpenAI Tools agent": "https://python.langchain.com/v0.2/docs/integrations/providers/portkey/logging_tracing_portkey/", "Portkey": "https://python.langchain.com/v0.2/docs/integrations/providers/portkey/index/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/momento_vector_index/", "Create a dataframe": "https://python.langchain.com/v0.2/docs/integrations/toolkits/csv/", "document_comparison_toolkit.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/document_comparison_toolkit/", "Define the neural network": "https://python.langchain.com/v0.2/docs/integrations/toolkits/python/", "fictional example": "https://python.langchain.com/v0.2/docs/integrations/toolkits/powerbi/", "sql_database.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/sql_database/", "Set environmental variables here": "https://python.langchain.com/v0.2/docs/integrations/toolkits/amadeus/", "airbyte_structured_qa.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/airbyte_structured_qa/", "Set your environment variables using os.environ": "https://python.langchain.com/v0.2/docs/integrations/toolkits/github/", "Note, you can also connect to Spark via Spark connect. For example:": "https://python.langchain.com/v0.2/docs/integrations/toolkits/spark_sql/", "IMPORTANT: If you plan to use this account in the future, make sure to save the": "https://python.langchain.com/v0.2/docs/integrations/toolkits/ainetwork/", "cogniswitch.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/cogniswitch/", "pandas.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/pandas/", "Install package": "https://python.langchain.com/v0.2/docs/integrations/toolkits/robocorp/", "Authorize connection to your Browser extention": "https://python.langchain.com/v0.2/docs/integrations/toolkits/multion/", "NOTE: In this example. 
We must set `allow_dangerous_request=True` to enable the OpenAPI Agent to automatically use the Request Tool.": "https://python.langchain.com/v0.2/docs/integrations/toolkits/openapi/", "install package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/weaviate/", "Install all needed libraries": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/yellowbrick/", "cosine: distance metric": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/jaguar/", "Pip install necessary package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/neo4jvector/", "openai": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/hippo/", "astradb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/astradb/", "cassandra.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/cassandra/", "Clean up KDB.AI \"documents\" table and index for similarity search": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/kdbai/", "Pip install necessary packages": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/timescalevector/", "Must be an OpenAI model that supports functions": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/openai_metadata_tagger/", "set a flag to switch between local and remote parsing": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/youtube_audio/", "see https://python.langchain.com/en/latest/modules/data_connection/getting_started.html for more details": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/figma/", "Creating and executing the seeding query": "https://python.langchain.com/v0.2/docs/integrations/graphs/memgraph/", "rdflib_sparql.md": "https://python.langchain.com/v0.2/docs/integrations/graphs/rdflib_sparql/", "connect ngql jupyter extension to nebulagraph": "https://python.langchain.com/v0.2/docs/integrations/graphs/nebula_graph/", "graph.refresh_schema()": "https://python.langchain.com/v0.2/docs/integrations/graphs/kuzu_db/", "diffbot.md": "https://python.langchain.com/v0.2/docs/integrations/graphs/diffbot/", "feeding the schema using a user construct query": "https://python.langchain.com/v0.2/docs/integrations/graphs/ontotext/", "How many people played in Top Gun?": "https://python.langchain.com/v0.2/docs/integrations/graphs/neo4j_cypher/", "Instantiate ArangoDB Database": "https://python.langchain.com/v0.2/docs/integrations/graphs/arangodb/", "amazon_neptune_open_cypher.md": "https://python.langchain.com/v0.2/docs/integrations/graphs/amazon_neptune_open_cypher/", "falkordb.md": "https://python.langchain.com/v0.2/docs/integrations/graphs/falkordb/", "Quickstart": "https://python.langchain.com/v0.2/docs/modules/model_io/.ipynb_checkpoints/quick_start-checkpoint/", "Debugging": "https://python.langchain.com/v0.2/docs/guides/development/debugging/", "Note that we set max_retries = 0 to avoid retrying on RateLimits, etc": "https://python.langchain.com/v0.2/docs/guides/productionization/fallbacks/", "QA with private data protection {#qa-with-private-data-protection}": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/qa_privacy_protection/", "Reversible data anonymization with Microsoft Presidio {#reversible-data-anonymization-with-microsoft-presidio}": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/reversible/", "Download model": 
"https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/index/", "Initialize the language model": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/examples/comparisons/", "ANTHROPIC_API_KEY=": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/trajectory/trajectory_eval/", "custom.md": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/trajectory/custom/", "Correct": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/string/scoring_eval_chain/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/chatbots/memory_management/", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/v0.2/docs/use_cases/code_understanding/", "If you'd like to use LangSmith, uncomment the below:": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/prompting/", "This will only get documents for Ankush": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/per_user/", "import dotenv": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/streaming/", "conversational_retrieval_agents.md": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/conversational_retrieval_agents/", "Uncomment if you want to log to LangSmith": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/citations/", "%pip install -qU langchain langchain-community langchain-openai youtube-transcript-api pytube langchain-chroma": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/quickstart/", "%pip install -qU langchain langchain-community langchain-openai langchain-chroma": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/no_queries/", "%pip install -qU langchain-core langchain-openai": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/techniques/step_back/", "%pip install -qU langchain langchain-community langchain-openai faker langchain-chroma": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/high_cardinality/", "%pip install -qU langchain langchain-openai": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/techniques/hyde/", "Optional, uncomment to trace runs with LangSmith. Sign up here: https://smith.langchain.com.": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/techniques/routing/", "%pip install -qU langchain langchain-openai youtube-transcript-api pytube": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/techniques/structuring/", "Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/sql/prompting/", "Install a model capable of tool calling": "https://python.langchain.com/v0.2/docs/use_cases/extraction/quickstart/", "Define a custom prompt to provide instructions and any additional context.": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/examples/", "Download the content": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/handle_long_text/", "Using LangSmith is recommended but not required. 
Uncomment below lines to use.": "https://python.langchain.com/v0.2/docs/use_cases/sql/csv/", "os.environ[\"OPENAI_API_KEY\"] = getpass.getpass()": "https://python.langchain.com/v0.2/docs/use_cases/sql/large_db/", "Retrievers": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/index/", "The vectorstore to use to index the child chunks": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/multi_vector/", "Build a sample vectorDB": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/MultiQueryRetriever/", "This example only specifies a filter": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/self_query/", "tools_as_openai_functions.md": "https://python.langchain.com/v0.2/docs/modules/tools/tools_as_openai_functions/", "[Beta] Memory": "https://python.langchain.com/v0.2/docs/modules/memory/.ipynb_checkpoints/index-checkpoint/", "adding_memory.md": "https://python.langchain.com/v0.2/docs/modules/memory/adding_memory/", "To enable streaming, we pass in `streaming=True` to the ChatModel constructor": "https://python.langchain.com/v0.2/docs/modules/callbacks/async_callbacks/", "Quickstart {#quickstart}": "https://python.langchain.com/v0.2/docs/modules/agents/quick_start/", "Load in document to retrieve over": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/agent_structured/", "pip install wikipedia": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/intermediate_steps/", "need to use GPT-4 here as GPT-3.5 does not understand, however hard you insist, that": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/agent_iter/", "custom_agent.md": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/custom_agent/", "Model I/O": "https://python.langchain.com/v0.2/docs/modules/model_io/index/", "!pip install -qU langchain-openai": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/token_usage_tracking/", "structured_output.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/structured_output/", "logprobs.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/logprobs/", "response_metadata.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/response_metadata/", "structured.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/structured/", "csv.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/csv/", "Define your desired data structure.": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/pydantic/", "retry.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/retry/", "enum.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/enum/", "openai_functions.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/openai_functions/", "Solely for documentation purposes.": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/pandas_dataframe/", "output_fixing.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/output_fixing/", "openai_tools.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/openai_tools/", "composition.md": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/composition/", "Prompt templates": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/prompt_templates/.ipynb_checkpoints/index-checkpoint/", 
"batch_configurable_chain([\"ice cream\", \"spaghetti\", \"dumplings\"])": "https://python.langchain.com/v0.2/docs/expression_language/why/", "The input schema of the chain is the input schema of its first part, the prompt.": "https://python.langchain.com/v0.2/docs/expression_language/interface/", "prompt_llm_parser.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/prompt_llm_parser/", "prompt_size.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/prompt_size/", "multiple_chains.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/multiple_chains/", "code_writing.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/code_writing/", "inspect.md": "https://python.langchain.com/v0.2/docs/expression_language/how_to/inspect/", "Remembers": "https://python.langchain.com/v0.2/docs/expression_language/how_to/message_history/", "decorator.md": "https://python.langchain.com/v0.2/docs/expression_language/how_to/decorator/", "Adding values to chain state {#adding-values-to-chain-state}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/assign/", "Binding: Attach runtime args {#binding-attach-runtime-args}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/binding/", "Formatting inputs & output {#formatting-inputs-output}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/parallel/", "Passing data through {#passing-data-through}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/passthrough/", "Configure chain internals at runtime {#configure-chain-internals-at-runtime}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/configure/", "Run custom functions {#run-custom-functions}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/functions/"}, "SystemMessage": {"\ud83e\udd9c\ufe0f\ud83c\udfd3 LangServe": "https://python.langchain.com/v0.2/docs/langserve/", "# activeloop token is needed if you are not signed in using CLI: `activeloop login -u -p `": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "and some deps for this notebook": "https://python.langchain.com/v0.2/docs/integrations/tools/exa_search/", "setup tools": "https://python.langchain.com/v0.2/docs/integrations/chat/huggingface/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/chat/fireworks/", "octoai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/octoai/", "service url": "https://python.langchain.com/v0.2/docs/integrations/chat/llama_edge/", "Note that each chunk may contain more than one \"token\"": "https://python.langchain.com/v0.2/docs/integrations/chat/google_generative_ai/", "Konko {#konko}": "https://python.langchain.com/v0.2/docs/integrations/chat/konko/", "openai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/openai/", "gigachat.md": "https://python.langchain.com/v0.2/docs/integrations/chat/gigachat/", "get a chat completion from the formatted messages": "https://python.langchain.com/v0.2/docs/integrations/chat/vllm/", "Let\u2019s try out LLAMA model offered on EverlyAI Hosted Endpoints {#lets-try-out-llama-model-offered-on-everlyai-hosted-endpoints}": "https://python.langchain.com/v0.2/docs/integrations/chat/everlyai/", "friendli.md": "https://python.langchain.com/v0.2/docs/integrations/chat/friendli/", "zhipuai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/zhipuai/", "!pip3 install text-generation": 
"https://python.langchain.com/v0.2/docs/integrations/chat/llama2_chat/", "yuan2.md": "https://python.langchain.com/v0.2/docs/integrations/chat/yuan2/", "Install the package": "https://python.langchain.com/v0.2/docs/integrations/chat/tongyi/", "Generate your api key from: https://platform.moonshot.cn/console/api-keys": "https://python.langchain.com/v0.2/docs/integrations/chat/moonshot/", "First step is to set up the env variable.": "https://python.langchain.com/v0.2/docs/integrations/chat/premai/", "Let\u2019s try out each model offered on Anyscale Endpoints {#lets-try-out-each-model-offered-on-anyscale-endpoints}": "https://python.langchain.com/v0.2/docs/integrations/chat/anyscale/", "yandex.md": "https://python.langchain.com/v0.2/docs/integrations/chat/yandex/", "LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/callbacks/llmonitor/", "context.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/context/", "labelstudio.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/labelstudio/", "trubrics.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/trubrics/", "MLflow Deployments for LLMs": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow/", "MLflow AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_ai_gateway/", "PremAI": "https://python.langchain.com/v0.2/docs/integrations/providers/premai/", "Javelin AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/javelin_ai_gateway/", "sql_database.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/sql_database/", "Install package": "https://python.langchain.com/v0.2/docs/integrations/toolkits/robocorp/", "Step 4: Embeddings Example {#step-4-embeddings-example}": "https://python.langchain.com/v0.2/docs/integrations/llms/javelin/", "%pip install -qU langchain-core langchain-openai": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/few_shot/", "%pip install -qU langchain langchain-openai": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/techniques/decomposition/", "Define a custom prompt to provide instructions and any additional context.": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/examples/", "adding_memory.md": "https://python.langchain.com/v0.2/docs/modules/memory/adding_memory/", "custom_chat_model.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/custom_chat_model/", "Quickstart {#quickstart}": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/quick_start/", "composition.md": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/composition/", "Quick reference {#quick-reference}": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/quick_start/"}, "HumanMessage": {"\ud83e\udd9c\ufe0f\ud83c\udfd3 LangServe": "https://python.langchain.com/v0.2/docs/langserve/", "\ud83e\udd9c\ud83d\udd78\ufe0fLangGraph": "https://python.langchain.com/v0.2/docs/langgraph/", "Merge consecutive messages from the same sender into a single message": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/discord/", "Set this to your Zep server URL": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory/", "# activeloop token is needed if you are not signed in using CLI: `activeloop login -u -p `": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "setup tools": 
"https://python.langchain.com/v0.2/docs/integrations/chat/mlx/", "azureml_chat_endpoint.md": "https://python.langchain.com/v0.2/docs/integrations/chat/azureml_chat_endpoint/", "alibaba_cloud_pai_eas.md": "https://python.langchain.com/v0.2/docs/integrations/chat/alibaba_cloud_pai_eas/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/chat/fireworks/", "octoai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/octoai/", "get a new token: https://deepinfra.com/login?from=%2Fdash": "https://python.langchain.com/v0.2/docs/integrations/chat/deepinfra/", "open ../../../static/img/brand/wordmark.png as base64 str": "https://python.langchain.com/v0.2/docs/integrations/chat/anthropic/", "litellm.md": "https://python.langchain.com/v0.2/docs/integrations/chat/litellm/", "service url": "https://python.langchain.com/v0.2/docs/integrations/chat/llama_edge/", "Note that each chunk may contain more than one \"token\"": "https://python.langchain.com/v0.2/docs/integrations/chat/google_generative_ai/", "Schema": "https://python.langchain.com/v0.2/docs/integrations/chat/ollama_functions/", "Install the package": "https://python.langchain.com/v0.2/docs/integrations/chat/tongyi/", "Konko {#konko}": "https://python.langchain.com/v0.2/docs/integrations/chat/konko/", "openai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/openai/", "for running these examples in the notebook:": "https://python.langchain.com/v0.2/docs/integrations/chat/google_vertex_ai_palm/", "bedrock.md": "https://python.langchain.com/v0.2/docs/integrations/chat/bedrock/", "gigachat.md": "https://python.langchain.com/v0.2/docs/integrations/chat/gigachat/", "get a chat completion from the formatted messages": "https://python.langchain.com/v0.2/docs/integrations/chat/vllm/", "LangChain supports many other chat models. 
Here, we're using Ollama": "https://python.langchain.com/v0.2/docs/integrations/chat/ollama/", "azure_chat_openai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/azure_chat_openai/", "Let\u2019s try out LLAMA model offered on EverlyAI Hosted Endpoints {#lets-try-out-llama-model-offered-on-everlyai-hosted-endpoints}": "https://python.langchain.com/v0.2/docs/integrations/chat/everlyai/", "gpt_router.md": "https://python.langchain.com/v0.2/docs/integrations/chat/gpt_router/", "litellm_router.md": "https://python.langchain.com/v0.2/docs/integrations/chat/litellm_router/", "friendli.md": "https://python.langchain.com/v0.2/docs/integrations/chat/friendli/", "If api_key is not passed, default behavior is to use the `MISTRAL_API_KEY` environment variable.": "https://python.langchain.com/v0.2/docs/integrations/chat/mistralai/", "zhipuai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/zhipuai/", "baichuan.md": "https://python.langchain.com/v0.2/docs/integrations/chat/baichuan/", "baidu_qianfan_endpoint.md": "https://python.langchain.com/v0.2/docs/integrations/chat/baidu_qianfan_endpoint/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/tool_error_handling/", "edenai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/edenai/", "ernie.md": "https://python.langchain.com/v0.2/docs/integrations/chat/ernie/", "tencent_hunyuan.md": "https://python.langchain.com/v0.2/docs/integrations/chat/tencent_hunyuan/", "minimax.md": "https://python.langchain.com/v0.2/docs/integrations/chat/minimax/", "yuan2.md": "https://python.langchain.com/v0.2/docs/integrations/chat/yuan2/", "promptlayer_chatopenai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/promptlayer_chatopenai/", "sparkllm.md": "https://python.langchain.com/v0.2/docs/integrations/chat/sparkllm/", "Generate your api key from: https://platform.moonshot.cn/console/api-keys": "https://python.langchain.com/v0.2/docs/integrations/chat/moonshot/", "dappier.md": "https://python.langchain.com/v0.2/docs/integrations/chat/dappier/", "First step is to set up the env variable.": "https://python.langchain.com/v0.2/docs/integrations/chat/premai/", "Let\u2019s try out each model offered on Anyscale Endpoints {#lets-try-out-each-model-offered-on-anyscale-endpoints}": "https://python.langchain.com/v0.2/docs/integrations/chat/anyscale/", "yandex.md": "https://python.langchain.com/v0.2/docs/integrations/chat/yandex/", "Or via the async API": "https://python.langchain.com/v0.2/docs/integrations/chat/nvidia_ai_endpoints/", "LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/callbacks/llmonitor/", "context.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/context/", "labelstudio.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/labelstudio/", "promptlayer.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/promptlayer/", "trubrics.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/trubrics/", "Log10": "https://python.langchain.com/v0.2/docs/integrations/providers/log10/", "MLflow Deployments for LLMs": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow/", "MLflow AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_ai_gateway/", "-> content='Hello! 
How can I assist you today?'": "https://python.langchain.com/v0.2/docs/integrations/providers/databricks/", "Flyte": "https://python.langchain.com/v0.2/docs/integrations/providers/flyte/", "PremAI": "https://python.langchain.com/v0.2/docs/integrations/providers/premai/", "Cohere": "https://python.langchain.com/v0.2/docs/integrations/providers/cohere/", "arthur_tracking.md": "https://python.langchain.com/v0.2/docs/integrations/providers/arthur_tracking/", "Javelin AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/javelin_ai_gateway/", "google_vertex_ai_palm.md": "https://python.langchain.com/v0.2/docs/integrations/llms/google_vertex_ai_palm/", "If running a Databricks notebook attached to an interactive cluster in \"single user\"": "https://python.langchain.com/v0.2/docs/integrations/llms/databricks/", "Step 4: Embeddings Example {#step-4-embeddings-example}": "https://python.langchain.com/v0.2/docs/integrations/llms/javelin/", "azure_ml.md": "https://python.langchain.com/v0.2/docs/integrations/llms/azure_ml/", "Quickstart": "https://python.langchain.com/v0.2/docs/modules/model_io/.ipynb_checkpoints/quick_start-checkpoint/", "Chat Bot Feedback Template": "https://python.langchain.com/v0.2/docs/templates/chat-bot-feedback/", "import dotenv": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/streaming/", "%pip install -qU langchain-core langchain-openai": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/few_shot/", "%pip install -qU langchain langchain-openai": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/techniques/decomposition/", "Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/graph/semantic/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/chatbots/memory_management/", "Define a custom prompt to provide instructions and any additional context.": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/examples/", "tools_as_openai_functions.md": "https://python.langchain.com/v0.2/docs/modules/tools/tools_as_openai_functions/", "To enable streaming, we pass in `streaming=True` to the ChatModel constructor": "https://python.langchain.com/v0.2/docs/modules/callbacks/async_callbacks/", "Quickstart {#quickstart}": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/quick_start/", "Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/agent_types/openai_tools/", "Construct the Tools agent": "https://python.langchain.com/v0.2/docs/modules/agents/agent_types/tool_calling/", "custom_agent.md": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/custom_agent/", "Model I/O": "https://python.langchain.com/v0.2/docs/modules/model_io/index/", "Tool calling {#tool-calling}": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/function_calling/", "custom_chat_model.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/custom_chat_model/", "composition.md": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/composition/", "Quick reference {#quick-reference}": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/quick_start/", "Remembers": "https://python.langchain.com/v0.2/docs/expression_language/how_to/message_history/"}, "RunnableMap": {"\ud83e\udd9c\ufe0f\ud83c\udfd3 LangServe": "https://python.langchain.com/v0.2/docs/langserve/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = 
\"true\"": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/parallel/"}, "RunnableLambda": {"\ud83e\udd9c\ufe0f\ud83c\udfd3 LangServe": "https://python.langchain.com/v0.2/docs/langserve/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_summary/", "QA with private data protection {#qa-with-private-data-protection}": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/qa_privacy_protection/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/parallel/", "This will only get documents for Ankush": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/per_user/", "Uncomment if you want to log to LangSmith": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/citations/", "Download the content": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/handle_long_text/", "retry.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/retry/", "Streaming With LangChain {#streaming-with-langchain}": "https://python.langchain.com/v0.2/docs/expression_language/streaming/", "Dynamically route logic based on input {#dynamically-route-logic-based-on-input}": "https://python.langchain.com/v0.2/docs/expression_language/how_to/routing/", "Run custom functions {#run-custom-functions}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/functions/"}, "MessagesPlaceholder": {"\ud83e\udd9c\ud83d\udd78\ufe0fLangGraph": "https://python.langchain.com/v0.2/docs/langgraph/", "redis_chat_message_history.md": "https://python.langchain.com/v0.2/docs/integrations/memory/redis_chat_message_history/", "# Automatically restart kernel after installs so that your environment can access the new packages": "https://python.langchain.com/v0.2/docs/integrations/memory/google_sql_mssql/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/tool_error_handling/", "Optionally, specify your own session_state key for storing messages": "https://python.langchain.com/v0.2/docs/integrations/memory/streamlit_chat_message_history/", "copy from tidb cloud console": "https://python.langchain.com/v0.2/docs/integrations/memory/tidb_chat_message_history/", "!pip3 install text-generation": "https://python.langchain.com/v0.2/docs/integrations/chat/llama2_chat/", "sql_database.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/sql_database/", "Set your environment variables using os.environ": "https://python.langchain.com/v0.2/docs/integrations/toolkits/github/", "Quickstart": "https://python.langchain.com/v0.2/docs/get_started/quickstart/", "import dotenv": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/streaming/", "Uncomment if you want to log to LangSmith": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/citations/", "%pip install -qU langchain-core langchain-openai": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/few_shot/", "%pip install -qU langchain langchain-openai": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/techniques/decomposition/", "Uncomment the below to use LangSmith. 
Not required.": "https://python.langchain.com/v0.2/docs/use_cases/sql/agents/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/chatbots/memory_management/", "Install a model capable of tool calling": "https://python.langchain.com/v0.2/docs/use_cases/extraction/quickstart/", "Define a custom prompt to provide instructions and any additional context.": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/examples/", "Download the content": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/handle_long_text/", "[Beta] Memory": "https://python.langchain.com/v0.2/docs/modules/memory/.ipynb_checkpoints/index-checkpoint/", "adding_memory.md": "https://python.langchain.com/v0.2/docs/modules/memory/adding_memory/", "Load in document to retrieve over": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/agent_structured/", "custom_agent.md": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/custom_agent/", "Quick reference {#quick-reference}": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/quick_start/", "prompt_size.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/prompt_size/", "Remembers": "https://python.langchain.com/v0.2/docs/expression_language/how_to/message_history/"}, "ToolMessage": {"\ud83e\udd9c\ud83d\udd78\ufe0fLangGraph": "https://python.langchain.com/v0.2/docs/langgraph/", "open ../../../static/img/brand/wordmark.png as base64 str": "https://python.langchain.com/v0.2/docs/integrations/chat/anthropic/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/tool_error_handling/", "%pip install -qU langchain-core langchain-openai": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/few_shot/", "%pip install -qU langchain langchain-openai": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/techniques/decomposition/", "Define a custom prompt to provide instructions and any additional context.": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/examples/", "Tool calling {#tool-calling}": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/function_calling/", "custom_chat_model.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/custom_chat_model/"}, "tool": {"\ud83e\udd9c\ud83d\udd78\ufe0fLangGraph": "https://python.langchain.com/v0.2/docs/langgraph/", "and some deps for this notebook": "https://python.langchain.com/v0.2/docs/integrations/tools/exa_search/", "LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/callbacks/llmonitor/", "Construct the OpenAI Tools agent": "https://python.langchain.com/v0.2/docs/integrations/providers/portkey/logging_tracing_portkey/", "Portkey": "https://python.langchain.com/v0.2/docs/integrations/providers/portkey/index/", "jsonformer_experimental.md": "https://python.langchain.com/v0.2/docs/integrations/llms/jsonformer_experimental/", "ANTHROPIC_API_KEY=": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/trajectory/trajectory_eval/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/agents/", "If you'd like to use LangSmith, uncomment the below:": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/prompting/", "Import things that are needed generically": "https://python.langchain.com/v0.2/docs/modules/tools/custom_tools/", "Get the prompt to use - you can modify 
this!": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/streaming/", "custom_agent.md": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/custom_agent/", "Tool calling {#tool-calling}": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/function_calling/", "Streaming With LangChain {#streaming-with-langchain}": "https://python.langchain.com/v0.2/docs/expression_language/streaming/"}, "convert_to_openai_tool": {"\ud83e\udd9c\ud83d\udd78\ufe0fLangGraph": "https://python.langchain.com/v0.2/docs/langgraph/", "Function calling": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/.ipynb_checkpoints/function_calling-checkpoint/"}, "TavilySearchResults": {"\ud83e\udd9c\ud83d\udd78\ufe0fLangGraph": "https://python.langchain.com/v0.2/docs/langgraph/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/integrations/tools/tavily_search/", "zhipuai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/zhipuai/", "Cohere": "https://python.langchain.com/v0.2/docs/integrations/providers/cohere/", "Quickstart": "https://python.langchain.com/v0.2/docs/get_started/quickstart/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/chatbots/tool_usage/", "Quickstart {#quickstart}": "https://python.langchain.com/v0.2/docs/modules/agents/quick_start/", "Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/agent_types/openai_tools/", "Construct the Tools agent": "https://python.langchain.com/v0.2/docs/modules/agents/agent_types/tool_calling/"}, "format_tool_to_openai_function": {"\ud83e\udd9c\ud83d\udd78\ufe0fLangGraph": "https://python.langchain.com/v0.2/docs/langgraph/"}, "BaseMessage": {"\ud83e\udd9c\ud83d\udd78\ufe0fLangGraph": "https://python.langchain.com/v0.2/docs/langgraph/", "Merge consecutive messages from the same sender into a single message": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/discord/", "Quickstart": "https://python.langchain.com/v0.2/docs/get_started/quickstart/", "Chat Bot Feedback Template": "https://python.langchain.com/v0.2/docs/templates/chat-bot-feedback/", "%pip install -qU langchain-core langchain-openai": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/few_shot/", "%pip install -qU langchain langchain-openai": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/techniques/decomposition/", "Define a custom prompt to provide instructions and any additional context.": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/examples/", "Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/streaming/", "custom_chat_model.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/custom_chat_model/"}, "FunctionMessage": {"\ud83e\udd9c\ud83d\udd78\ufe0fLangGraph": "https://python.langchain.com/v0.2/docs/langgraph/", "custom_chat_model.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/custom_chat_model/"}, "AgentAction": {"\ud83e\udd9c\ud83d\udd78\ufe0fLangGraph": "https://python.langchain.com/v0.2/docs/langgraph/", "custom.md": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/trajectory/custom/", "First, define custom callback handler implementations": "https://python.langchain.com/v0.2/docs/modules/callbacks/multiple_callbacks/"}, "AgentFinish": {"\ud83e\udd9c\ud83d\udd78\ufe0fLangGraph": 
"https://python.langchain.com/v0.2/docs/langgraph/", "openai_assistants.md": "https://python.langchain.com/v0.2/docs/modules/agents/agent_types/openai_assistants/", "Load in document to retrieve over": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/agent_structured/"}, "create_openai_functions_agent": {"\ud83e\udd9c\ud83d\udd78\ufe0fLangGraph": "https://python.langchain.com/v0.2/docs/langgraph/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/integrations/toolkits/polygon/", "How to use it inside an Agent {#how-to-use-it-inside-an-agent}": "https://python.langchain.com/v0.2/docs/integrations/tools/infobip/", "start by installing semanticscholar api": "https://python.langchain.com/v0.2/docs/integrations/tools/semanticscholar/", "Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/agent_types/openai_functions_agent/", "For use in Chaining section": "https://python.langchain.com/v0.2/docs/integrations/tools/you/", "Define the neural network": "https://python.langchain.com/v0.2/docs/integrations/toolkits/python/", "Authorize connection to your Browser extention": "https://python.langchain.com/v0.2/docs/integrations/toolkits/multion/", "Quickstart": "https://python.langchain.com/v0.2/docs/get_started/quickstart/", "pip install wikipedia": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/intermediate_steps/", "need to use GPT-4 here as GPT-3.5 does not understand, however hard you insist, that": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/agent_iter/"}, "tracing_v2_enabled": {"Used by the agent in this tutorial": "https://python.langchain.com/v0.2/docs/langsmith/walkthrough/", "Set your environment variables using os.environ": "https://python.langchain.com/v0.2/docs/integrations/toolkits/github/", "Chat Bot Feedback Template": "https://python.langchain.com/v0.2/docs/templates/chat-bot-feedback/"}, "AgentExecutor": {"Used by the agent in this tutorial": "https://python.langchain.com/v0.2/docs/langsmith/walkthrough/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/agents/", "How to use it inside an Agent {#how-to-use-it-inside-an-agent}": "https://python.langchain.com/v0.2/docs/integrations/tools/infobip/", "start by installing semanticscholar api": "https://python.langchain.com/v0.2/docs/integrations/tools/semanticscholar/", "Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/max_iterations/", "memorize.md": "https://python.langchain.com/v0.2/docs/integrations/tools/memorize/", "arxiv.md": "https://python.langchain.com/v0.2/docs/integrations/tools/arxiv/", "and some deps for this notebook": "https://python.langchain.com/v0.2/docs/integrations/tools/exa_search/", "Adapted code from /docs/modules/agents/how_to/sharedmemory_for_tools": "https://python.langchain.com/v0.2/docs/integrations/tools/reddit_search/", "For use in Chaining section": "https://python.langchain.com/v0.2/docs/integrations/tools/you/", "Based on ReAct Agent": "https://python.langchain.com/v0.2/docs/integrations/tools/ionic_shopping/", "setup tools": "https://python.langchain.com/v0.2/docs/integrations/chat/mlx/", "zhipuai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/zhipuai/", "LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/callbacks/llmonitor/", "Streamlit": "https://python.langchain.com/v0.2/docs/integrations/callbacks/streamlit/", 
"Cohere": "https://python.langchain.com/v0.2/docs/integrations/providers/cohere/", "Dataherald": "https://python.langchain.com/v0.2/docs/integrations/providers/.ipynb_checkpoints/dataherald-checkpoint/", "Construct the OpenAI Tools agent": "https://python.langchain.com/v0.2/docs/integrations/providers/portkey/logging_tracing_portkey/", "Portkey": "https://python.langchain.com/v0.2/docs/integrations/providers/portkey/index/", "Define the neural network": "https://python.langchain.com/v0.2/docs/integrations/toolkits/python/", "sql_database.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/sql_database/", "Set environmental variables here": "https://python.langchain.com/v0.2/docs/integrations/toolkits/amadeus/", "Install package": "https://python.langchain.com/v0.2/docs/integrations/toolkits/robocorp/", "Authorize connection to your Browser extention": "https://python.langchain.com/v0.2/docs/integrations/toolkits/multion/", "azure_ai_services.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/azure_ai_services/", "System parameter in NIBittensorLLM is optional but you can set whatever you want to perform with model": "https://python.langchain.com/v0.2/docs/integrations/llms/bittensor/", "Quickstart": "https://python.langchain.com/v0.2/docs/get_started/quickstart/", "conversational_retrieval_agents.md": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/conversational_retrieval_agents/", "Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/graph/semantic/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/chatbots/tool_usage/", "agent_with_memory_in_db.md": "https://python.langchain.com/v0.2/docs/modules/memory/agent_with_memory_in_db/", "agent_with_memory.md": "https://python.langchain.com/v0.2/docs/modules/memory/agent_with_memory/", "Quickstart {#quickstart}": "https://python.langchain.com/v0.2/docs/modules/agents/quick_start/", "Construct the Tools agent": "https://python.langchain.com/v0.2/docs/modules/agents/agent_types/tool_calling/", "openai_assistants.md": "https://python.langchain.com/v0.2/docs/modules/agents/agent_types/openai_assistants/", "Load in document to retrieve over": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/agent_structured/", "pip install wikipedia": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/intermediate_steps/", "need to use GPT-4 here as GPT-3.5 does not understand, however hard you insist, that": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/agent_iter/", "custom_agent.md": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/custom_agent/", "!pip install -qU langchain-openai": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/token_usage_tracking/", "prompt_size.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/prompt_size/"}, "format_to_openai_tool_messages": {"Used by the agent in this tutorial": "https://python.langchain.com/v0.2/docs/langsmith/walkthrough/", "custom_agent.md": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/custom_agent/"}, "OpenAIToolsAgentOutputParser": {"Used by the agent in this tutorial": "https://python.langchain.com/v0.2/docs/langsmith/walkthrough/", "custom_agent.md": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/custom_agent/"}, "DuckDuckGoSearchResults": {"Used by the agent in this tutorial": 
"https://python.langchain.com/v0.2/docs/langsmith/walkthrough/", "ddg.md": "https://python.langchain.com/v0.2/docs/integrations/tools/ddg/"}, "AgentType": {"Used by the agent in this tutorial": "https://python.langchain.com/v0.2/docs/langsmith/walkthrough/", "chatgpt_plugins.md": "https://python.langchain.com/v0.2/docs/integrations/tools/chatgpt_plugins/", "Specify your Connery Runner credentials.": "https://python.langchain.com/v0.2/docs/integrations/toolkits/connery/", "use it with langchain {#use-it-with-langchain}": "https://python.langchain.com/v0.2/docs/integrations/tools/google_jobs/", "google_serper.md": "https://python.langchain.com/v0.2/docs/integrations/tools/google_serper/", "Artifacts are charts created by matplotlib when `plt.show()` is called": "https://python.langchain.com/v0.2/docs/integrations/tools/e2b_data_analysis/", "Answer with 'Zhu'": "https://python.langchain.com/v0.2/docs/integrations/tools/human_tools/", "How YahooFinanceNewsTool works? {#how-yahoofinancenewstool-works}": "https://python.langchain.com/v0.2/docs/integrations/tools/yahoo_finance_news/", "google_finance.md": "https://python.langchain.com/v0.2/docs/integrations/tools/google_finance/", "awslambda.md": "https://python.langchain.com/v0.2/docs/integrations/tools/awslambda/", "folder_id='1yucgL9WGgWZdM1TOuKkeghlPizuzMYb5'": "https://python.langchain.com/v0.2/docs/integrations/tools/google_drive/", "openweathermap.md": "https://python.langchain.com/v0.2/docs/integrations/tools/openweathermap/", "memorize.md": "https://python.langchain.com/v0.2/docs/integrations/tools/memorize/", "search_tools.md": "https://python.langchain.com/v0.2/docs/integrations/tools/search_tools/", "eleven_labs_tts.md": "https://python.langchain.com/v0.2/docs/integrations/tools/eleven_labs_tts/", "Extract pdf content": "https://python.langchain.com/v0.2/docs/integrations/tools/bearly/", "get from https://platform.openai.com/": "https://python.langchain.com/v0.2/docs/integrations/tools/zapier/", "graphql.md": "https://python.langchain.com/v0.2/docs/integrations/tools/graphql/", "searchapi.md": "https://python.langchain.com/v0.2/docs/integrations/tools/searchapi/", "edenai_tools.md": "https://python.langchain.com/v0.2/docs/integrations/tools/edenai_tools/", "bash.md": "https://python.langchain.com/v0.2/docs/integrations/tools/bash/", "Set this to your Zep server URL": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory/", "xata_chat_message_history.md": "https://python.langchain.com/v0.2/docs/integrations/memory/xata_chat_message_history/", "LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/callbacks/llmonitor/", "Connect to Comet if no API Key is set": "https://python.langchain.com/v0.2/docs/integrations/callbacks/comet_tracing/", "argilla.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/argilla/", "Streamlit": "https://python.langchain.com/v0.2/docs/integrations/callbacks/.ipynb_checkpoints/streamlit-checkpoint/", "SearchApi": "https://python.langchain.com/v0.2/docs/integrations/providers/searchapi/", "scenario 1 - LLM": "https://python.langchain.com/v0.2/docs/integrations/providers/aim_tracking/", "os.environ[\"OPENAI_API_KEY\"] = \"\"": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracking/", "SCENARIO 1 - LLM": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_tracking/", "Serper - Google Search API": "https://python.langchain.com/v0.2/docs/integrations/providers/google_serper/", "Flyte": 
"https://python.langchain.com/v0.2/docs/integrations/providers/flyte/", "wandb documentation to configure wandb using env variables": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracing/", "Setup and use the ClearML Callback": "https://python.langchain.com/v0.2/docs/integrations/providers/clearml_tracking/", "Create a dataframe": "https://python.langchain.com/v0.2/docs/integrations/toolkits/csv/", "jira.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/jira/", "document_comparison_toolkit.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/document_comparison_toolkit/", "For Windows/Linux": "https://python.langchain.com/v0.2/docs/integrations/toolkits/azure_cognitive_services/", "Select the LLM to use. Here, we use gpt-3.5-turbo-instruct": "https://python.langchain.com/v0.2/docs/integrations/toolkits/openapi_nla/", "steam.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/steam/", "airbyte_structured_qa.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/airbyte_structured_qa/", "Set your environment variables using os.environ": "https://python.langchain.com/v0.2/docs/integrations/toolkits/gitlab/", "Copilot Sandbox": "https://python.langchain.com/v0.2/docs/integrations/toolkits/clickup/", "IMPORTANT: If you plan to use this account in the future, make sure to save the": "https://python.langchain.com/v0.2/docs/integrations/toolkits/ainetwork/", "If this is your first time using playwright, you'll have to install a browser executable.": "https://python.langchain.com/v0.2/docs/integrations/toolkits/playwright/", "Set environmental variables here": "https://python.langchain.com/v0.2/docs/integrations/toolkits/office365/", "pandas.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/pandas/", "nasa.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/nasa/", "These are sample parameters for Falcon 40B Instruct Deployed from Amazon SageMaker JumpStart": "https://python.langchain.com/v0.2/docs/integrations/llms/amazon_api_gateway/", "Debugging": "https://python.langchain.com/v0.2/docs/guides/development/debugging/", "Using https://huggingface.co/laiyer/deberta-v3-base-prompt-injection": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/hugging_face_prompt_injection/", "Initialize the language model": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/examples/comparisons/", "ANTHROPIC_API_KEY=": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/trajectory/trajectory_eval/", "First, define custom callback handler implementations": "https://python.langchain.com/v0.2/docs/modules/callbacks/multiple_callbacks/", "need to use GPT-4 here as GPT-3.5 does not understand, however hard you insist, that": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/agent_iter/", "token_usage_tracking.md": "https://python.langchain.com/v0.2/docs/modules/model_io/llms/token_usage_tracking/"}, "initialize_agent": {"Used by the agent in this tutorial": "https://python.langchain.com/v0.2/docs/langsmith/walkthrough/", "chatgpt_plugins.md": "https://python.langchain.com/v0.2/docs/integrations/tools/chatgpt_plugins/", "Specify your Connery Runner credentials.": "https://python.langchain.com/v0.2/docs/integrations/toolkits/connery/", "use it with langchain {#use-it-with-langchain}": "https://python.langchain.com/v0.2/docs/integrations/tools/google_jobs/", "google_serper.md": 
"https://python.langchain.com/v0.2/docs/integrations/tools/google_serper/", "Artifacts are charts created by matplotlib when `plt.show()` is called": "https://python.langchain.com/v0.2/docs/integrations/tools/e2b_data_analysis/", "Answer with 'Zhu'": "https://python.langchain.com/v0.2/docs/integrations/tools/human_tools/", "How YahooFinanceNewsTool works? {#how-yahoofinancenewstool-works}": "https://python.langchain.com/v0.2/docs/integrations/tools/yahoo_finance_news/", "google_finance.md": "https://python.langchain.com/v0.2/docs/integrations/tools/google_finance/", "awslambda.md": "https://python.langchain.com/v0.2/docs/integrations/tools/awslambda/", "folder_id='1yucgL9WGgWZdM1TOuKkeghlPizuzMYb5'": "https://python.langchain.com/v0.2/docs/integrations/tools/google_drive/", "openweathermap.md": "https://python.langchain.com/v0.2/docs/integrations/tools/openweathermap/", "memorize.md": "https://python.langchain.com/v0.2/docs/integrations/tools/memorize/", "search_tools.md": "https://python.langchain.com/v0.2/docs/integrations/tools/search_tools/", "eleven_labs_tts.md": "https://python.langchain.com/v0.2/docs/integrations/tools/eleven_labs_tts/", "Extract pdf content": "https://python.langchain.com/v0.2/docs/integrations/tools/bearly/", "get from https://platform.openai.com/": "https://python.langchain.com/v0.2/docs/integrations/tools/zapier/", "graphql.md": "https://python.langchain.com/v0.2/docs/integrations/tools/graphql/", "searchapi.md": "https://python.langchain.com/v0.2/docs/integrations/tools/searchapi/", "gradio_tools.md": "https://python.langchain.com/v0.2/docs/integrations/tools/gradio_tools/", "sceneXplain.md": "https://python.langchain.com/v0.2/docs/integrations/tools/sceneXplain/", "edenai_tools.md": "https://python.langchain.com/v0.2/docs/integrations/tools/edenai_tools/", "Needed if you would like to display images in the notebook": "https://python.langchain.com/v0.2/docs/integrations/tools/dalle_image_generator/", "bash.md": "https://python.langchain.com/v0.2/docs/integrations/tools/bash/", "Set this to your Zep server URL": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory/", "xata_chat_message_history.md": "https://python.langchain.com/v0.2/docs/integrations/memory/xata_chat_message_history/", "LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/callbacks/llmonitor/", "LLM Hyperparameters": "https://python.langchain.com/v0.2/docs/integrations/callbacks/sagemaker_tracking/", "Connect to Comet if no API Key is set": "https://python.langchain.com/v0.2/docs/integrations/callbacks/comet_tracing/", "argilla.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/argilla/", "Streamlit": "https://python.langchain.com/v0.2/docs/integrations/callbacks/.ipynb_checkpoints/streamlit-checkpoint/", "SearchApi": "https://python.langchain.com/v0.2/docs/integrations/providers/searchapi/", "os.environ[\"OPENAI_ORGANIZATION\"] = \"...\"": "https://python.langchain.com/v0.2/docs/integrations/providers/comet_tracking/", "scenario 1 - LLM": "https://python.langchain.com/v0.2/docs/integrations/providers/aim_tracking/", "os.environ[\"OPENAI_API_KEY\"] = \"\"": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracking/", "SCENARIO 1 - LLM": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_tracking/", "Serper - Google Search API": "https://python.langchain.com/v0.2/docs/integrations/providers/google_serper/", "Flyte": "https://python.langchain.com/v0.2/docs/integrations/providers/flyte/", "wandb documentation to 
configure wandb using env variables": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracing/", "Setup and use the ClearML Callback": "https://python.langchain.com/v0.2/docs/integrations/providers/clearml_tracking/", "jira.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/jira/", "document_comparison_toolkit.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/document_comparison_toolkit/", "For Windows/Linux": "https://python.langchain.com/v0.2/docs/integrations/toolkits/azure_cognitive_services/", "Select the LLM to use. Here, we use gpt-3.5-turbo-instruct": "https://python.langchain.com/v0.2/docs/integrations/toolkits/openapi_nla/", "steam.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/steam/", "Set your environment variables using os.environ": "https://python.langchain.com/v0.2/docs/integrations/toolkits/gitlab/", "Copilot Sandbox": "https://python.langchain.com/v0.2/docs/integrations/toolkits/clickup/", "IMPORTANT: If you plan to use this account in the future, make sure to save the": "https://python.langchain.com/v0.2/docs/integrations/toolkits/ainetwork/", "If this is your first time using playwright, you'll have to install a browser executable.": "https://python.langchain.com/v0.2/docs/integrations/toolkits/playwright/", "Set environmental variables here": "https://python.langchain.com/v0.2/docs/integrations/toolkits/office365/", "nasa.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/nasa/", "These are sample parameters for Falcon 40B Instruct Deployed from Amazon SageMaker JumpStart": "https://python.langchain.com/v0.2/docs/integrations/llms/amazon_api_gateway/", "Debugging": "https://python.langchain.com/v0.2/docs/guides/development/debugging/", "Using https://huggingface.co/laiyer/deberta-v3-base-prompt-injection": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/hugging_face_prompt_injection/", "Initialize the language model": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/examples/comparisons/", "ANTHROPIC_API_KEY=": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/trajectory/trajectory_eval/", "First, define custom callback handler implementations": "https://python.langchain.com/v0.2/docs/modules/callbacks/multiple_callbacks/", "need to use GPT-4 here as GPT-3.5 does not understand, however hard you insist, that": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/agent_iter/", "token_usage_tracking.md": "https://python.langchain.com/v0.2/docs/modules/model_io/llms/token_usage_tracking/"}, "load_tools": {"Used by the agent in this tutorial": "https://python.langchain.com/v0.2/docs/langsmith/walkthrough/", "Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/.ipynb_checkpoints/google-checkpoint/", "chatgpt_plugins.md": "https://python.langchain.com/v0.2/docs/integrations/tools/chatgpt_plugins/", "use it with langchain {#use-it-with-langchain}": "https://python.langchain.com/v0.2/docs/integrations/tools/google_jobs/", "Answer with 'Zhu'": "https://python.langchain.com/v0.2/docs/integrations/tools/human_tools/", "google_finance.md": "https://python.langchain.com/v0.2/docs/integrations/tools/google_finance/", "awslambda.md": "https://python.langchain.com/v0.2/docs/integrations/tools/awslambda/", "folder_id='1yucgL9WGgWZdM1TOuKkeghlPizuzMYb5'": "https://python.langchain.com/v0.2/docs/integrations/tools/google_drive/", "Each tool wrapps a requests wrapper": 
"https://python.langchain.com/v0.2/docs/integrations/tools/requests/", "openweathermap.md": "https://python.langchain.com/v0.2/docs/integrations/tools/openweathermap/", "memorize.md": "https://python.langchain.com/v0.2/docs/integrations/tools/memorize/", "search_tools.md": "https://python.langchain.com/v0.2/docs/integrations/tools/search_tools/", "eleven_labs_tts.md": "https://python.langchain.com/v0.2/docs/integrations/tools/eleven_labs_tts/", "arxiv.md": "https://python.langchain.com/v0.2/docs/integrations/tools/arxiv/", "graphql.md": "https://python.langchain.com/v0.2/docs/integrations/tools/graphql/", "sceneXplain.md": "https://python.langchain.com/v0.2/docs/integrations/tools/sceneXplain/", "Needed if you would like to display images in the notebook": "https://python.langchain.com/v0.2/docs/integrations/tools/dalle_image_generator/", "setup tools": "https://python.langchain.com/v0.2/docs/integrations/chat/mlx/", "LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/callbacks/llmonitor/", "LLM Hyperparameters": "https://python.langchain.com/v0.2/docs/integrations/callbacks/sagemaker_tracking/", "Connect to Comet if no API Key is set": "https://python.langchain.com/v0.2/docs/integrations/callbacks/comet_tracing/", "argilla.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/argilla/", "Streamlit": "https://python.langchain.com/v0.2/docs/integrations/callbacks/.ipynb_checkpoints/streamlit-checkpoint/", "SerpAPI": "https://python.langchain.com/v0.2/docs/integrations/providers/serpapi/", "SearchApi": "https://python.langchain.com/v0.2/docs/integrations/providers/searchapi/", "os.environ[\"OPENAI_ORGANIZATION\"] = \"...\"": "https://python.langchain.com/v0.2/docs/integrations/providers/comet_tracking/", "scenario 1 - LLM": "https://python.langchain.com/v0.2/docs/integrations/providers/aim_tracking/", "Golden": "https://python.langchain.com/v0.2/docs/integrations/providers/golden/", "os.environ[\"OPENAI_API_KEY\"] = \"\"": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracking/", "Wolfram Alpha": "https://python.langchain.com/v0.2/docs/integrations/providers/wolfram_alpha/", "SCENARIO 1 - LLM": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_tracking/", "DataForSEO": "https://python.langchain.com/v0.2/docs/integrations/providers/dataforseo/", "SearxNG Search API": "https://python.langchain.com/v0.2/docs/integrations/providers/searx/", "Serper - Google Search API": "https://python.langchain.com/v0.2/docs/integrations/providers/google_serper/", "OpenWeatherMap": "https://python.langchain.com/v0.2/docs/integrations/providers/openweathermap/", "Stack Exchange": "https://python.langchain.com/v0.2/docs/integrations/providers/stackexchange/", "Flyte": "https://python.langchain.com/v0.2/docs/integrations/providers/flyte/", "wandb documentation to configure wandb using env variables": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracing/", "Setup and use the ClearML Callback": "https://python.langchain.com/v0.2/docs/integrations/providers/clearml_tracking/", "Dataherald": "https://python.langchain.com/v0.2/docs/integrations/providers/.ipynb_checkpoints/dataherald-checkpoint/", "These are sample parameters for Falcon 40B Instruct Deployed from Amazon SageMaker JumpStart": "https://python.langchain.com/v0.2/docs/integrations/llms/amazon_api_gateway/", "Debugging": "https://python.langchain.com/v0.2/docs/guides/development/debugging/", "First, define custom callback handler implementations": 
"https://python.langchain.com/v0.2/docs/modules/callbacks/multiple_callbacks/", "!pip install -qU langchain-openai": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/token_usage_tracking/", "token_usage_tracking.md": "https://python.langchain.com/v0.2/docs/modules/model_io/llms/token_usage_tracking/", "prompt_size.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/prompt_size/"}, "EvaluatorType": {"Used by the agent in this tutorial": "https://python.langchain.com/v0.2/docs/langsmith/walkthrough/", "This is equivalent to loading using the enum": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/string/criteria_eval_chain/"}, "RunEvalConfig": {"Used by the agent in this tutorial": "https://python.langchain.com/v0.2/docs/langsmith/walkthrough/"}, "arun_on_dataset": {"Used by the agent in this tutorial": "https://python.langchain.com/v0.2/docs/langsmith/walkthrough/"}, "run_on_dataset": {"Used by the agent in this tutorial": "https://python.langchain.com/v0.2/docs/langsmith/walkthrough/"}, "BaseChatModel": {"Contribute Integrations": "https://python.langchain.com/v0.2/docs/contributing/integrations/", "custom_chat_model.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/custom_chat_model/"}, "deprecated": {"Contribute Integrations": "https://python.langchain.com/v0.2/docs/contributing/integrations/"}, "ChatSession": {"Merge consecutive messages from the same sender into a single message": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/discord/", "This uses some example data": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/imessage/"}, "map_ai_messages": {"Merge consecutive messages from the same sender into a single message": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/discord/", "This uses some example data": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/imessage/", "The file token.json stores the user's access and refresh tokens, and is": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/gmail/"}, "merge_chat_runs": {"Merge consecutive messages from the same sender into a single message": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/discord/", "This uses some example data": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/imessage/"}, "FolderFacebookMessengerChatLoader": {"This uses some example data": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/facebook/", "Facebook - Meta": "https://python.langchain.com/v0.2/docs/integrations/providers/facebook/"}, "SingleFileFacebookMessengerChatLoader": {"This uses some example data": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/facebook/", "Facebook - Meta": "https://python.langchain.com/v0.2/docs/integrations/providers/facebook/"}, "convert_messages_for_finetuning": {"This uses some example data": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/imessage/", "Wait for the fine-tuning to complete (this may take some time)": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/langsmith_dataset/"}, "StrOutputParser": {"This uses some example data": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/imessage/", "del os.environ['NVIDIA_API_KEY'] ## delete key and reset": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/nvidia_ai_endpoints/", "For use in Chaining section": 
"https://python.langchain.com/v0.2/docs/integrations/retrievers/you-retriever/", "fleet_context.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/fleet_context/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/integrations/retrievers/tavily/", "LangChain supports many other chat models. Here, we're using Ollama": "https://python.langchain.com/v0.2/docs/integrations/chat/ollama/", "Loading the COMVEST 2024 notice": "https://python.langchain.com/v0.2/docs/integrations/chat/maritalk/", "Or via the async API": "https://python.langchain.com/v0.2/docs/integrations/chat/nvidia_ai_endpoints/", "Fiddler project and model names, used for model registration": "https://python.langchain.com/v0.2/docs/integrations/callbacks/fiddler/", "1. Vanilla RAG {#vanilla-rag-1}": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "From LangChain, import standard modules for prompting.": "https://python.langchain.com/v0.2/docs/integrations/providers/dspy/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_summary/", "install package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/weaviate/", "cosine: distance metric": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/jaguar/", "astradb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/astradb/", "cassandra.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/cassandra/", "Install the package": "https://python.langchain.com/v0.2/docs/integrations/llms/volcengine_maas/", "use default authN method API-key": "https://python.langchain.com/v0.2/docs/integrations/llms/oci_generative_ai/", "install the opaqueprompts and langchain packages": "https://python.langchain.com/v0.2/docs/integrations/llms/opaqueprompts/", "AI21 Contextual Answer {#ai21-contextual-answer}": "https://python.langchain.com/v0.2/docs/integrations/llms/ai21/", "Quickstart": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/.ipynb_checkpoints/quickstart-checkpoint/", "Note that we set max_retries = 0 to avoid retrying on RateLimits, etc": "https://python.langchain.com/v0.2/docs/guides/productionization/fallbacks/", "QA with private data protection {#qa-with-private-data-protection}": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/qa_privacy_protection/", "This will only get documents for Ankush": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/per_user/", "import dotenv": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/streaming/", "Make sure the model path is correct for your system!": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/local_retrieval_qa/", "Uncomment if you want to log to LangSmith": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/citations/", "%pip install -qU langchain-core langchain-openai": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/techniques/step_back/", "%pip install -qU langchain langchain-openai": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/techniques/hyde/", "Uncomment the below to use LangSmith. 
Not required.": "https://python.langchain.com/v0.2/docs/use_cases/sql/query_checking/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/chatbots/quickstart/", "Retrievers": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/index/", "The vectorstore to use to index the child chunks": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/multi_vector/", "Streaming With LangChain {#streaming-with-langchain}": "https://python.langchain.com/v0.2/docs/expression_language/streaming/", "batch_configurable_chain([\"ice cream\", \"spaghetti\", \"dumplings\"])": "https://python.langchain.com/v0.2/docs/expression_language/why/", "> ChatPromptValue(messages=[HumanMessage(content='tell me a short joke about ice cream')])": "https://python.langchain.com/v0.2/docs/expression_language/get_started/", "The input schema of the chain is the input schema of its first part, the prompt.": "https://python.langchain.com/v0.2/docs/expression_language/interface/", "prompt_llm_parser.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/prompt_llm_parser/", "multiple_chains.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/multiple_chains/", "code_writing.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/code_writing/", "Dynamically route logic based on input {#dynamically-route-logic-based-on-input}": "https://python.langchain.com/v0.2/docs/expression_language/how_to/routing/", "inspect.md": "https://python.langchain.com/v0.2/docs/expression_language/how_to/inspect/", "decorator.md": "https://python.langchain.com/v0.2/docs/expression_language/how_to/decorator/", "Adding values to chain state {#adding-values-to-chain-state}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/assign/", "Binding: Attach runtime args {#binding-attach-runtime-args}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/binding/", "Formatting inputs & output {#formatting-inputs-output}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/parallel/", "Passing data through {#passing-data-through}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/passthrough/", "Run custom functions {#run-custom-functions}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/functions/", "Chaining runnables {#chaining-runnables}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/sequence/"}, "convert_message_to_dict": {"Filter out tweets that reference other tweets, because it's a bit weird": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/twitter/"}, "AIMessage": {"Filter out tweets that reference other tweets, because it's a bit weird": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/twitter/", "Set this to your Zep server URL": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory/", "open ../../../static/img/brand/wordmark.png as base64 str": "https://python.langchain.com/v0.2/docs/integrations/chat/anthropic/", "zhipuai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/zhipuai/", "yuan2.md": "https://python.langchain.com/v0.2/docs/integrations/chat/yuan2/", "sql_database.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/sql_database/", "Install required dependencies": "https://python.langchain.com/v0.2/docs/integrations/llms/chatglm/", "Quickstart": 
"https://python.langchain.com/v0.2/docs/get_started/quickstart/", "Chat Bot Feedback Template": "https://python.langchain.com/v0.2/docs/templates/chat-bot-feedback/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/tool_error_handling/", "If you'd like to use LangSmith, uncomment the below:": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/human_in_the_loop/", "%pip install -qU langchain-core langchain-openai": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/few_shot/", "%pip install -qU langchain langchain-openai": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/techniques/decomposition/", "Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/graph/semantic/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/chatbots/memory_management/", "Define a custom prompt to provide instructions and any additional context.": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/examples/", "Set up a parser": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/parse/", "Quickstart {#quickstart}": "https://python.langchain.com/v0.2/docs/modules/agents/quick_start/", "Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/agent_types/openai_tools/", "Construct the Tools agent": "https://python.langchain.com/v0.2/docs/modules/agents/agent_types/tool_calling/", "custom_agent.md": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/custom_agent/", "Tool calling {#tool-calling}": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/function_calling/", "custom_chat_model.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/custom_chat_model/", "The [bool] desribes a parameterization of a generic.": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/custom/", "composition.md": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/composition/", "Quick reference {#quick-reference}": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/quick_start/"}, "convert_pydantic_to_openai_function": {"Wait for the fine-tuning to complete (this may take some time)": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/langsmith_llm_runs/", "openai_functions.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/openai_functions/"}, "PydanticOutputFunctionsParser": {"Wait for the fine-tuning to complete (this may take some time)": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/langsmith_llm_runs/", "openai_functions.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/openai_functions/"}, "LangSmithRunChatLoader": {"Wait for the fine-tuning to complete (this may take some time)": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/langsmith_llm_runs/"}, "GMailLoader": {"The file token.json stores the user's access and refresh tokens, and is": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/gmail/", "Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/"}, "SlackChatLoader": {"Merge consecutive messages from the same sender into a single message": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/slack/", "Slack": "https://python.langchain.com/v0.2/docs/integrations/providers/slack/"}, 
"WhatsAppChatLoader": {"Merge consecutive messages from the same sender into a single message": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/whatsapp/", "Facebook - Meta": "https://python.langchain.com/v0.2/docs/integrations/providers/facebook/", "WhatsApp": "https://python.langchain.com/v0.2/docs/integrations/providers/whatsapp/", "whatsapp_chat.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/whatsapp_chat/"}, "LangSmithDatasetChatLoader": {"Wait for the fine-tuning to complete (this may take some time)": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/langsmith_dataset/"}, "IMessageChatLoader": {"This uses some example data": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/imessage/"}, "TelegramChatLoader": {"Merge consecutive messages from the same sender into a single message": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/telegram/", "Telegram": "https://python.langchain.com/v0.2/docs/integrations/providers/telegram/"}, "base": {"Merge consecutive messages from the same sender into a single message": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/discord/"}, "BookendEmbeddings": {"bookend.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/bookend/"}, "HuggingFaceBgeEmbeddings": {"bge_huggingface.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/bge_huggingface/", "Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/platforms/huggingface/", "QA with private data protection {#qa-with-private-data-protection}": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/qa_privacy_protection/"}, "QuantizedBiEncoderEmbeddings": {"optimum_intel.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/optimum_intel/", "Intel": "https://python.langchain.com/v0.2/docs/integrations/providers/intel/"}, "FireworksEmbeddings": {"Using the Embedding Model {#using-the-embedding-model}": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/fireworks/"}, "XinferenceEmbeddings": {"xinference.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/xinference/"}, "LLMRailsEmbeddings": {"llm_rails.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/llm_rails/"}, "DeepInfraEmbeddings": {"sign up for an account: https://deepinfra.com/login?utm_source=langchain": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/deepinfra/", "DeepInfra": "https://python.langchain.com/v0.2/docs/integrations/providers/deepinfra/"}, "HuggingFaceEmbeddings": {"huggingfacehub.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/huggingfacehub/", "Equivalent to SentenceTransformerEmbeddings(model_name=\"all-MiniLM-L6-v2\")": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/sentence_transformers/", "Get 3 diff embeddings.": "https://python.langchain.com/v0.2/docs/integrations/retrievers/merger_retriever/", "Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/platforms/huggingface/", "VDMS": "https://python.langchain.com/v0.2/docs/integrations/providers/vdms/", "Refresh is required for server use": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vald/", "scann.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/scann/", "default metric is angular": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/annoy/", "tiledb.md": 
"https://python.langchain.com/v0.2/docs/integrations/vectorstores/tiledb/", "%pip install --upgrade --quiet surrealdb langchain langchain-community": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/surrealdb/", "OR": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vearch/", "Pip install necessary package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vdms/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/faiss/", "Ensure that all we need is installed": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/infinispanvs/", "Create collection if running for the first time. If the collection": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/semadb/", "OR (depending on Python version)": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/cross_encoder_reranker/", "pairwise_embedding_distance.md": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/comparison/pairwise_embedding_distance/", "You can load by enum or by raw python string": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/string/embedding_distance/", "self-query-qdrant": "https://python.langchain.com/v0.2/docs/templates/self-query-qdrant/", "Get embeddings.": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/long_context_reorder/"}, "HuggingFaceInferenceAPIEmbeddings": {"huggingfacehub.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/huggingfacehub/"}, "HuggingFaceHubEmbeddings": {"huggingfacehub.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/huggingfacehub/", "text_embeddings_inference.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/text_embeddings_inference/", "Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/platforms/huggingface/"}, "GoogleGenerativeAIEmbeddings": {"google_generative_ai.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/google_generative_ai/", "Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/"}, "GPT4AllEmbeddings": {"gpt4all.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/gpt4all/", "Make sure the model path is correct for your system!": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/local_retrieval_qa/"}, "MosaicMLInstructorEmbeddings": {"sign up for an account: https://forms.mosaicml.com/demo?utm_source=langchain": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/mosaicml/"}, "QuantizedBgeEmbeddings": {"itrex.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/itrex/", "Intel": "https://python.langchain.com/v0.2/docs/integrations/providers/intel/"}, "OpenAIEmbeddings": {"openai.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/openai/", "set the environment variables needed for openai package to know to reach out to azure": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/azureopenai/", "azure_ai_search.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/azure_ai_search/", "re_phrase.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/re_phrase/", "cosine: distance metric": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/jaguar/", "fleet_context.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/fleet_context/", 
"Helper function for printing docs": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/contextual_compression/", "Establishing a connection to the database is facilitated through the singlestoredb Python connector.": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/singlestoredb/", "knn.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/knn/", "initialize the index": "https://python.langchain.com/v0.2/docs/integrations/retrievers/docarray_retriever/", "svm.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/svm/", "create the index": "https://python.langchain.com/v0.2/docs/integrations/retrievers/pinecone_hybrid_search/", "# activeloop token is needed if you are not signed in using CLI: `activeloop login -u -p `": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "OR (depending on Python version)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/flashrank-reranker/", "Get 3 diff embeddings.": "https://python.langchain.com/v0.2/docs/integrations/retrievers/merger_retriever/", "This example only specifies a relevant query": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/mongodb_atlas/", "or install latest:": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/dingo/", "Get openAI api key by reading local .env file": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/timescalevector_self_query/", "create new index": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/pinecone/", "in case if some queries fail consider installing libdeeplake manually": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/activeloop_deeplake_self_query/", "import os": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/qdrant_self_query/", "OpenAI": "https://python.langchain.com/v0.2/docs/integrations/platforms/openai/", "xata_chat_message_history.md": "https://python.langchain.com/v0.2/docs/integrations/memory/xata_chat_message_history/", "Here we want to make sure the answer is minimally relevant": "https://python.langchain.com/v0.2/docs/integrations/callbacks/confident/", "1. 
Vanilla RAG {#vanilla-rag-1}": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "ragatouille.md": "https://python.langchain.com/v0.2/docs/integrations/providers/ragatouille/", "Javelin AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/javelin_ai_gateway/", "astradb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/astradb/", "document_comparison_toolkit.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/document_comparison_toolkit/", "lancedb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/lancedb/", "load text splitter and split docs into snippets of text": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/starrocks/", "Pip install necessary package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pgembedding/", "install package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/weaviate/", "Install all needed libraries": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/yellowbrick/", "databricks_vector_search.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/databricks_vector_search/", "xata.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/xata/", "openai": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/hippo/", "connection to redis standalone at localhost, db 0, no password": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/redis/", "output length: 4": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/rockset/", "replace": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/zilliz/", "Set up the OpenAI Environment Variables": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azure_cosmos_db/", "vikingdb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vikingdb/", "Wait until the cluster is ready for use.": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/couchbase/", "typesense.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/typesense/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/momento_vector_index/", "Here we useimport getpass": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tidb_vector/", "or shorter": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/activeloop_deeplake/", "Pip install necessary package {#pip-install-necessary-package}": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/lantern/", "import": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/chroma/", "duckdb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/duckdb/", "for example": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/alibabacloud_opensearch/", "# if you plan to use bson serialization, install also:": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sklearn/", "from langchain_community.embeddings.openai import OpenAIEmbeddings": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tencentvectordb/", "Get an OpenAI token: https://platform.openai.com/account/api-keys": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/docarray_in_memory/", "use directly a `where_str` to delete": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/myscale/", "clickhouse.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/clickhouse/", 
"qdrant.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/qdrant/", "tigris.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tigris/", "ecloud_vector_search.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/ecloud_vector_search/", "with pip": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/supabase/", "If using the default Docker installation, use this instantiation instead:": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/opensearch/", "pinecone.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pinecone/", "OR": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/faiss_async/", "Option 1: use an OpenAI account": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azuresearch/", "cassandra.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/cassandra/", "usearch.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/usearch/", "This will only get documents for Ankush": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/per_user/", "Clean up KDB.AI \"documents\" table and index for similarity search": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/kdbai/", "Metadata {#metadata}": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/elasticsearch/", "Pip install necessary packages": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/timescalevector/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/faiss/", "epsilla.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/epsilla/", "DocumentDB connection string": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/documentdb/", "analyticdb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/analyticdb/", "hologres.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/hologres/", "initialize MongoDB python client": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/mongodb_atlas/", "Use Meilisearch vector store to store texts & associated embeddings as vector": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/meilisearch/", "set a flag to switch between local and remote parsing": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/youtube_audio/", "Uncomment this to install psychicapi if you don't already have it installed": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/psychic/", "You need the dgml-utils package to use the DocugamiLoader (run pip install directly without \"poetry run\" if you are not using poetry)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/docugami/", "To make the caching really obvious, lets use a slower model.": "https://python.langchain.com/v0.2/docs/integrations/llms/llm_caching/", "Quickstart": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/.ipynb_checkpoints/quickstart-checkpoint/", "QA with private data protection {#qa-with-private-data-protection}": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/qa_privacy_protection/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/chatbots/quickstart/", "Set env var OPENAI_API_KEY or load from a .env file": 
"https://python.langchain.com/v0.2/docs/use_cases/code_understanding/", "import dotenv": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/streaming/", "conversational_retrieval_agents.md": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/conversational_retrieval_agents/", "Uncomment if you want to log to LangSmith": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/citations/", "%pip install -qU langchain langchain-community langchain-openai youtube-transcript-api pytube langchain-chroma": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/quickstart/", "%pip install -qU langchain langchain-community langchain-openai langchain-chroma": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/no_queries/", "%pip install -qU langchain langchain-community langchain-openai faker langchain-chroma": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/high_cardinality/", "Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/sql/prompting/", "Download the content": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/handle_long_text/", "os.environ[\"OPENAI_API_KEY\"] = getpass.getpass()": "https://python.langchain.com/v0.2/docs/use_cases/sql/large_db/", "indexing.md": "https://python.langchain.com/v0.2/docs/modules/data_connection/indexing/", "Text embedding models": "https://python.langchain.com/v0.2/docs/modules/data_connection/text_embedding/index/", "Swapping the `ByteStore` {#swapping-the-bytestore}": "https://python.langchain.com/v0.2/docs/modules/data_connection/text_embedding/caching_embeddings/", "initialize the bm25 retriever and faiss retriever": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/ensemble/", "The vectorstore to use to index the child chunks": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/multi_vector/", "Define your embedding model": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/time_weighted_vectorstore/", "Build a sample vectorDB": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/MultiQueryRetriever/", "This text splitter is used to create the child documents": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/parent_document_retriever/", "vectorstore.md": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/vectorstore/", "This example only specifies a filter": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/self_query/", "Retrievers": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/.ipynb_checkpoints/index-checkpoint/", "Self-querying": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/.ipynb_checkpoints/self_query-checkpoint/", "Vector stores": "https://python.langchain.com/v0.2/docs/modules/data_connection/vectorstores/index/", "This is a long document we can split up.": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_transformers/semantic-chunker/", "PDF": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/pdf/", "adding_memory_chain_multiple_inputs.md": "https://python.langchain.com/v0.2/docs/modules/memory/adding_memory_chain_multiple_inputs/", "Backed by a Vector Store": "https://python.langchain.com/v0.2/docs/modules/memory/types/vectorstore_retriever_memory/", "Quickstart {#quickstart}": 
"https://python.langchain.com/v0.2/docs/modules/agents/quick_start/", "Load in document to retrieve over": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/agent_structured/", "Select the most similar example to the input.": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/few_shot_examples/", "This is a prompt template used to format each individual example.": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/few_shot_examples_chat/", "Examples of a pretend task of creating antonyms.": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/example_selectors/similarity/", "Streaming With LangChain {#streaming-with-langchain}": "https://python.langchain.com/v0.2/docs/expression_language/streaming/", "> ChatPromptValue(messages=[HumanMessage(content='tell me a short joke about ice cream')])": "https://python.langchain.com/v0.2/docs/expression_language/get_started/", "The input schema of the chain is the input schema of its first part, the prompt.": "https://python.langchain.com/v0.2/docs/expression_language/interface/", "Dynamically route logic based on input {#dynamically-route-logic-based-on-input}": "https://python.langchain.com/v0.2/docs/expression_language/how_to/routing/", "inspect.md": "https://python.langchain.com/v0.2/docs/expression_language/how_to/inspect/", "Adding values to chain state {#adding-values-to-chain-state}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/assign/", "Formatting inputs & output {#formatting-inputs-output}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/parallel/", "Passing data through {#passing-data-through}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/passthrough/"}, "VertexAIEmbeddings": {"google_vertex_ai_palm.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/google_vertex_ai_palm/", "Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "# Automatically restart kernel after installs so that your environment can access the new packages": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_cloud_sql_pg/", "TODO : Set values as per your requirements": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_vertex_ai_vector_search/", "@markdown Please specify a source for demo purpose.": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_firestore/"}, "BedrockEmbeddings": {"async embed query": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/bedrock/", "AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/"}, "GigaChatEmbeddings": {"gigachat.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/gigachat/", "Salute Devices": "https://python.langchain.com/v0.2/docs/integrations/providers/salute_devices/"}, "OllamaEmbeddings": {"ollama.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/ollama/", "Ollama": "https://python.langchain.com/v0.2/docs/integrations/providers/ollama/", "Quickstart": "https://python.langchain.com/v0.2/docs/get_started/quickstart/"}, "OCIGenAIEmbeddings": {"use default authN method API-key": "https://python.langchain.com/v0.2/docs/integrations/llms/oci_generative_ai/", "Oracle Cloud Infrastructure (OCI)": "https://python.langchain.com/v0.2/docs/integrations/providers/oci/"}, "FastEmbedEmbeddings": {"fastembed.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/fastembed/"}, "LlamaCppEmbeddings": 
{"llamacpp.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/llamacpp/", "Llama.cpp": "https://python.langchain.com/v0.2/docs/integrations/providers/llamacpp/"}, "NLPCloudEmbeddings": {"nlp_cloud.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/nlp_cloud/", "NLPCloud": "https://python.langchain.com/v0.2/docs/integrations/providers/nlpcloud/"}, "LaserEmbeddings": {"Ex Instantiationz": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/laser/", "Facebook - Meta": "https://python.langchain.com/v0.2/docs/integrations/providers/facebook/"}, "OpenCLIPEmbeddings": {"Image URIs": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/open_clip/", "Establishing a connection to the database is facilitated through the singlestoredb Python connector.": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/singlestoredb/"}, "TitanTakeoffEmbed": {"Model config for the embedding model, where you can specify the following parameters:": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/titan_takeoff/"}, "MistralAIEmbeddings": {"pip install -U langchain-mistralai": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/mistralai/", "mistralai.md": "https://python.langchain.com/v0.2/docs/integrations/providers/mistralai/"}, "SpacyEmbeddings": {"spacy_embedding.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/spacy_embedding/", "spaCy": "https://python.langchain.com/v0.2/docs/integrations/providers/spacy/"}, "BaichuanTextEmbeddings": {"baichuan.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/baichuan/", "Baichuan": "https://python.langchain.com/v0.2/docs/integrations/providers/baichuan/"}, "TogetherEmbeddings": {"install package": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/together/", "together.md": "https://python.langchain.com/v0.2/docs/integrations/providers/together/"}, "HuggingFaceInstructEmbeddings": {"instruct_embeddings.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/instruct_embeddings/", "Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/platforms/huggingface/"}, "QianfanEmbeddingsEndpoint": {"baidu_qianfan_endpoint.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/baidu_qianfan_endpoint/", "ernie.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/ernie/", "Baidu": "https://python.langchain.com/v0.2/docs/integrations/providers/baidu/", "Create a bes instance and index docs.": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/baiducloud_vector_search/"}, "CohereEmbeddings": {"cohere.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/cohere/", "OR (depending on Python version)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/cohere-reranker/", "Cohere": "https://python.langchain.com/v0.2/docs/integrations/providers/cohere/", "See docker command above to launch a postgres instance with pgvector enabled.": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pgvector/", "Quickstart": "https://python.langchain.com/v0.2/docs/get_started/quickstart/", "Text embedding models": "https://python.langchain.com/v0.2/docs/modules/data_connection/text_embedding/index/"}, "EdenAiEmbeddings": {"edenai.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/edenai/", "Eden AI": "https://python.langchain.com/v0.2/docs/integrations/providers/edenai/"}, 
"JohnSnowLabsEmbeddings": {"If you have a enterprise license, you can run this to install enterprise features": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/johnsnowlabs_embedding/"}, "ErnieEmbeddings": {"ernie.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/ernie/"}, "LLMChain": {"Dependencies {#dependencies}": "https://python.langchain.com/v0.2/docs/integrations/llms/clarifai/", "re_phrase.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/re_phrase/", "memorize.md": "https://python.langchain.com/v0.2/docs/integrations/tools/memorize/", "get from https://platform.openai.com/": "https://python.langchain.com/v0.2/docs/integrations/tools/zapier/", "Adapted code from /docs/modules/agents/how_to/sharedmemory_for_tools": "https://python.langchain.com/v0.2/docs/integrations/tools/reddit_search/", "Needed if you would like to display images in the notebook": "https://python.langchain.com/v0.2/docs/integrations/tools/dalle_image_generator/", "loads previous state from Mot\u00f6rhead \ud83e\udd18": "https://python.langchain.com/v0.2/docs/integrations/memory/motorhead_memory/", "!pip3 install text-generation": "https://python.langchain.com/v0.2/docs/integrations/chat/llama2_chat/", "context.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/context/", "LLM Hyperparameters": "https://python.langchain.com/v0.2/docs/integrations/callbacks/sagemaker_tracking/", "argilla.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/argilla/", "os.environ[\"OPENAI_ORGANIZATION\"] = \"...\"": "https://python.langchain.com/v0.2/docs/integrations/providers/comet_tracking/", "scenario 1 - LLM": "https://python.langchain.com/v0.2/docs/integrations/providers/aim_tracking/", "os.environ[\"OPENAI_API_KEY\"] = \"\"": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracking/", "MLflow Deployments for LLMs": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow/", "MLflow AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_ai_gateway/", "!pip3 install rebuff openai -U": "https://python.langchain.com/v0.2/docs/integrations/providers/rebuff/", "Prediction Guard": "https://python.langchain.com/v0.2/docs/integrations/providers/predictionguard/", "SCENARIO 1 - LLM": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_tracking/", "Shale Protocol": "https://python.langchain.com/v0.2/docs/integrations/providers/shaleprotocol/", "Flyte": "https://python.langchain.com/v0.2/docs/integrations/providers/flyte/", "0: Import ray serve and request from starlette": "https://python.langchain.com/v0.2/docs/integrations/providers/ray_serve/", "Javelin AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/javelin_ai_gateway/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/llms/minimax/", "Install all needed libraries": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/yellowbrick/", "stochasticai.md": "https://python.langchain.com/v0.2/docs/integrations/llms/stochasticai/", "solar.md": "https://python.langchain.com/v0.2/docs/integrations/llms/solar/", "System parameter in NIBittensorLLM is optional but you can set whatever you want to perform with model": "https://python.langchain.com/v0.2/docs/integrations/llms/bittensor/", "Update Langchain": "https://python.langchain.com/v0.2/docs/integrations/llms/ipex_llm/", "Install the package https://docs.banana.dev/banana-docs/core-concepts/sdks/python": 
"https://python.langchain.com/v0.2/docs/integrations/llms/banana/", "alibabacloud_pai_eas_endpoint.md": "https://python.langchain.com/v0.2/docs/integrations/llms/alibabacloud_pai_eas_endpoint/", "openllm.md": "https://python.langchain.com/v0.2/docs/integrations/llms/openllm/", "octoai.md": "https://python.langchain.com/v0.2/docs/integrations/llms/octoai/", "If you get an error, probably, you need to set up the \"base_url\" parameter that can be taken from the error log.": "https://python.langchain.com/v0.2/docs/integrations/llms/writer/", "Register an account with Modal and get a new token.": "https://python.langchain.com/v0.2/docs/integrations/llms/modal/", "textgen.md": "https://python.langchain.com/v0.2/docs/integrations/llms/textgen/", "xinference.md": "https://python.langchain.com/v0.2/docs/integrations/llms/xinference/", "symblai_nebula.md": "https://python.langchain.com/v0.2/docs/integrations/llms/symblai_nebula/", "get a new token: https://deepinfra.com/login?from=%2Fdash": "https://python.langchain.com/v0.2/docs/integrations/llms/deepinfra/", "get a token: https://docs.nlpcloud.com/#authentication": "https://python.langchain.com/v0.2/docs/integrations/llms/nlpcloud/", "Callbacks support token-wise streaming": "https://python.langchain.com/v0.2/docs/integrations/llms/gpt4all/", "get a new token: https://docs.forefront.ai/forefront/api-reference/authentication": "https://python.langchain.com/v0.2/docs/integrations/llms/forefrontai/", "sign up for an account: https://forms.mosaicml.com/demo?utm_source=langchain": "https://python.langchain.com/v0.2/docs/integrations/llms/mosaicml/", "Install the package": "https://python.langchain.com/v0.2/docs/integrations/llms/pipelineai/", "get a token: https://platform.openai.com/account/api-keys": "https://python.langchain.com/v0.2/docs/integrations/llms/openai/", "gigachat.md": "https://python.langchain.com/v0.2/docs/integrations/llms/gigachat/", "use default authN method API-key": "https://python.langchain.com/v0.2/docs/integrations/llms/oci_generative_ai/", "install the opaqueprompts and langchain packages": "https://python.langchain.com/v0.2/docs/integrations/llms/opaqueprompts/", "%pip list | grep aphrodite": "https://python.langchain.com/v0.2/docs/integrations/llms/aphrodite/", "Run the chain specifying only the input variable for the first chain.": "https://python.langchain.com/v0.2/docs/integrations/llms/edenai/", "Optional, add your OpenAI API Key. 
This is optional, as Prediction Guard allows": "https://python.langchain.com/v0.2/docs/integrations/llms/predictionguard/", "Step 4: Embeddings Example {#step-4-embeddings-example}": "https://python.langchain.com/v0.2/docs/integrations/llms/javelin/", "Calling a single prompt": "https://python.langchain.com/v0.2/docs/integrations/llms/ibm_watsonx/", "ctransformers.md": "https://python.langchain.com/v0.2/docs/integrations/llms/ctransformers/", "vllm.md": "https://python.langchain.com/v0.2/docs/integrations/llms/vllm/", "azure_ml.md": "https://python.langchain.com/v0.2/docs/integrations/llms/azure_ml/", "default infer_api for a local deployed Yuan2.0 inference server": "https://python.langchain.com/v0.2/docs/integrations/llms/yuan2/", "get a token: https://huggingface.co/docs/api-inference/quicktour#get-your-api-token": "https://python.langchain.com/v0.2/docs/integrations/llms/huggingface_endpoint/", "For an on-demand A100 with GCP, Azure, or Lambda": "https://python.langchain.com/v0.2/docs/integrations/llms/runhouse/", "anyscale.md": "https://python.langchain.com/v0.2/docs/integrations/llms/anyscale/", "yandex.md": "https://python.langchain.com/v0.2/docs/integrations/llms/yandex/", "gooseai.md": "https://python.langchain.com/v0.2/docs/integrations/llms/gooseai/", "Uncomment to install openlm and openai if you haven't already": "https://python.langchain.com/v0.2/docs/integrations/llms/openlm/", "Using streaming": "https://python.langchain.com/v0.2/docs/integrations/llms/cloudflare_workersai/", "conversation can take several minutes": "https://python.langchain.com/v0.2/docs/integrations/llms/ctranslate2/", "Install required dependencies": "https://python.langchain.com/v0.2/docs/integrations/llms/chatglm/", "Improve the results by fine-tuning (optional) {#improve-the-results-by-fine-tuning-optional}": "https://python.langchain.com/v0.2/docs/integrations/llms/gradient/", "this can take several minutes to download big files!": "https://python.langchain.com/v0.2/docs/integrations/llms/petals/", "magics to auto-reload external modules in case you are making changes to langchain while working on this notebook": "https://python.langchain.com/v0.2/docs/integrations/llms/replicate/", "Download a llamafile from HuggingFace": "https://python.langchain.com/v0.2/docs/guides/development/local_llms/", "Logical Fallacy chain": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/logical_fallacy_chain/", "Constitutional chain": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/constitutional_chain/", "custom.md": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/trajectory/custom/", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/v0.2/docs/use_cases/summarization/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/apis/", "Get embeddings.": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/long_context_reorder/", "Build a sample vectorDB": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/MultiQueryRetriever/", "[Beta] Memory": "https://python.langchain.com/v0.2/docs/modules/memory/.ipynb_checkpoints/index-checkpoint/", "adding_memory.md": "https://python.langchain.com/v0.2/docs/modules/memory/adding_memory/", "Callbacks": "https://python.langchain.com/v0.2/docs/modules/callbacks/index/", "composition.md": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/composition/"}, 
"ClarifaiEmbeddings": {"Dependencies {#dependencies}": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/clarifai/", "Clarifai": "https://python.langchain.com/v0.2/docs/integrations/providers/clarifai/"}, "PromptTemplate": {"Dependencies {#dependencies}": "https://python.langchain.com/v0.2/docs/integrations/llms/clarifai/", "re_phrase.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/re_phrase/", "folder_id='1yucgL9WGgWZdM1TOuKkeghlPizuzMYb5'": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_drive/", "get from https://platform.openai.com/": "https://python.langchain.com/v0.2/docs/integrations/tools/zapier/", "send data into the chain": "https://python.langchain.com/v0.2/docs/integrations/tools/nvidia_riva/", "and some deps for this notebook": "https://python.langchain.com/v0.2/docs/integrations/tools/exa_search/", "Adapted code from /docs/modules/agents/how_to/sharedmemory_for_tools": "https://python.langchain.com/v0.2/docs/integrations/tools/reddit_search/", "Needed if you would like to display images in the notebook": "https://python.langchain.com/v0.2/docs/integrations/tools/dalle_image_generator/", "loads previous state from Mot\u00f6rhead \ud83e\udd18": "https://python.langchain.com/v0.2/docs/integrations/memory/motorhead_memory/", "context.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/context/", "LLM Hyperparameters": "https://python.langchain.com/v0.2/docs/integrations/callbacks/sagemaker_tracking/", "argilla.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/argilla/", "From LangChain, import standard modules for prompting.": "https://python.langchain.com/v0.2/docs/integrations/providers/dspy/", "os.environ[\"OPENAI_ORGANIZATION\"] = \"...\"": "https://python.langchain.com/v0.2/docs/integrations/providers/comet_tracking/", "scenario 1 - LLM": "https://python.langchain.com/v0.2/docs/integrations/providers/aim_tracking/", "os.environ[\"OPENAI_API_KEY\"] = \"\"": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracking/", "MLflow AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_ai_gateway/", "!pip3 install rebuff openai -U": "https://python.langchain.com/v0.2/docs/integrations/providers/rebuff/", "Prediction Guard": "https://python.langchain.com/v0.2/docs/integrations/providers/predictionguard/", "SCENARIO 1 - LLM": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_tracking/", "Shale Protocol": "https://python.langchain.com/v0.2/docs/integrations/providers/shaleprotocol/", "Flyte": "https://python.langchain.com/v0.2/docs/integrations/providers/flyte/", "0: Import ray serve and request from starlette": "https://python.langchain.com/v0.2/docs/integrations/providers/ray_serve/", "Javelin AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/javelin_ai_gateway/", "DocumentDB connection string": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/documentdb/", "initialize MongoDB python client": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/mongodb_atlas/", "airbyte.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte/", "Creating and executing the seeding query": "https://python.langchain.com/v0.2/docs/integrations/graphs/memgraph/", "How many people played in Top Gun?": "https://python.langchain.com/v0.2/docs/integrations/graphs/neo4j_cypher/", "Setup {#setup}": 
"https://python.langchain.com/v0.2/docs/integrations/llms/minimax/", "stochasticai.md": "https://python.langchain.com/v0.2/docs/integrations/llms/stochasticai/", "solar.md": "https://python.langchain.com/v0.2/docs/integrations/llms/solar/", "System parameter in NIBittensorLLM is optional but you can set whatever you want to perform with model": "https://python.langchain.com/v0.2/docs/integrations/llms/bittensor/", "Update Langchain": "https://python.langchain.com/v0.2/docs/integrations/llms/ipex_llm/", "Install the package https://docs.banana.dev/banana-docs/core-concepts/sdks/python": "https://python.langchain.com/v0.2/docs/integrations/llms/banana/", "alibabacloud_pai_eas_endpoint.md": "https://python.langchain.com/v0.2/docs/integrations/llms/alibabacloud_pai_eas_endpoint/", "openllm.md": "https://python.langchain.com/v0.2/docs/integrations/llms/openllm/", "sagemaker.md": "https://python.langchain.com/v0.2/docs/integrations/llms/sagemaker/", "octoai.md": "https://python.langchain.com/v0.2/docs/integrations/llms/octoai/", "If you get an error, probably, you need to set up the \"base_url\" parameter that can be taken from the error log.": "https://python.langchain.com/v0.2/docs/integrations/llms/writer/", "Register an account with Modal and get a new token.": "https://python.langchain.com/v0.2/docs/integrations/llms/modal/", "textgen.md": "https://python.langchain.com/v0.2/docs/integrations/llms/textgen/", "xinference.md": "https://python.langchain.com/v0.2/docs/integrations/llms/xinference/", "symblai_nebula.md": "https://python.langchain.com/v0.2/docs/integrations/llms/symblai_nebula/", "get a new token: https://deepinfra.com/login?from=%2Fdash": "https://python.langchain.com/v0.2/docs/integrations/llms/deepinfra/", "anthropic.md": "https://python.langchain.com/v0.2/docs/integrations/llms/anthropic/", "get a token: https://docs.nlpcloud.com/#authentication": "https://python.langchain.com/v0.2/docs/integrations/llms/nlpcloud/", "Callbacks support token-wise streaming": "https://python.langchain.com/v0.2/docs/integrations/llms/llamacpp/", "get a new token: https://docs.forefront.ai/forefront/api-reference/authentication": "https://python.langchain.com/v0.2/docs/integrations/llms/forefrontai/", "sign up for an account: https://forms.mosaicml.com/demo?utm_source=langchain": "https://python.langchain.com/v0.2/docs/integrations/llms/mosaicml/", "Install the package": "https://python.langchain.com/v0.2/docs/integrations/llms/pipelineai/", "get a token: https://platform.openai.com/account/api-keys": "https://python.langchain.com/v0.2/docs/integrations/llms/openai/", "google_vertex_ai_palm.md": "https://python.langchain.com/v0.2/docs/integrations/llms/google_vertex_ai_palm/", "gigachat.md": "https://python.langchain.com/v0.2/docs/integrations/llms/gigachat/", "use default authN method API-key": "https://python.langchain.com/v0.2/docs/integrations/llms/oci_generative_ai/", "huggingface_pipelines.md": "https://python.langchain.com/v0.2/docs/integrations/llms/huggingface_pipelines/", "install the opaqueprompts and langchain packages": "https://python.langchain.com/v0.2/docs/integrations/llms/opaqueprompts/", "Note importing TitanTakeoffPro instead of TitanTakeoff will work as well both use same object under the hood": "https://python.langchain.com/v0.2/docs/integrations/llms/titan_takeoff/", "%pip list | grep aphrodite": "https://python.langchain.com/v0.2/docs/integrations/llms/aphrodite/", "AI21 Contextual Answer {#ai21-contextual-answer}": 
"https://python.langchain.com/v0.2/docs/integrations/llms/ai21/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/integrations/llms/cohere/", "Run the chain specifying only the input variable for the first chain.": "https://python.langchain.com/v0.2/docs/integrations/llms/edenai/", "Optional, add your OpenAI API Key. This is optional, as Prediction Guard allows": "https://python.langchain.com/v0.2/docs/integrations/llms/predictionguard/", "Step 4: Embeddings Example {#step-4-embeddings-example}": "https://python.langchain.com/v0.2/docs/integrations/llms/javelin/", "Calling a single prompt": "https://python.langchain.com/v0.2/docs/integrations/llms/ibm_watsonx/", "ctransformers.md": "https://python.langchain.com/v0.2/docs/integrations/llms/ctransformers/", "vllm.md": "https://python.langchain.com/v0.2/docs/integrations/llms/vllm/", "azure_ml.md": "https://python.langchain.com/v0.2/docs/integrations/llms/azure_ml/", "Map reduce example": "https://python.langchain.com/v0.2/docs/integrations/llms/manifest/", "get a token: https://huggingface.co/docs/api-inference/quicktour#get-your-api-token": "https://python.langchain.com/v0.2/docs/integrations/llms/huggingface_endpoint/", "mlx_pipelines.md": "https://python.langchain.com/v0.2/docs/integrations/llms/mlx_pipelines/", "For an on-demand A100 with GCP, Azure, or Lambda": "https://python.langchain.com/v0.2/docs/integrations/llms/runhouse/", "anyscale.md": "https://python.langchain.com/v0.2/docs/integrations/llms/anyscale/", "yandex.md": "https://python.langchain.com/v0.2/docs/integrations/llms/yandex/", "gooseai.md": "https://python.langchain.com/v0.2/docs/integrations/llms/gooseai/", "Uncomment to install openlm and openai if you haven't already": "https://python.langchain.com/v0.2/docs/integrations/llms/openlm/", "Using streaming": "https://python.langchain.com/v0.2/docs/integrations/llms/cloudflare_workersai/", "conversation can take several minutes": "https://python.langchain.com/v0.2/docs/integrations/llms/ctranslate2/", "google_ai.md": "https://python.langchain.com/v0.2/docs/integrations/llms/google_ai/", "Install required dependencies": "https://python.langchain.com/v0.2/docs/integrations/llms/chatglm/", "Improve the results by fine-tuning (optional) {#improve-the-results-by-fine-tuning-optional}": "https://python.langchain.com/v0.2/docs/integrations/llms/gradient/", "this can take several minutes to download big files!": "https://python.langchain.com/v0.2/docs/integrations/llms/petals/", "openvino.md": "https://python.langchain.com/v0.2/docs/integrations/llms/openvino/", "weight_only_quantization.md": "https://python.langchain.com/v0.2/docs/integrations/llms/weight_only_quantization/", "magics to auto-reload external modules in case you are making changes to langchain while working on this notebook": "https://python.langchain.com/v0.2/docs/integrations/llms/replicate/", "Quickstart": "https://python.langchain.com/v0.2/docs/modules/model_io/.ipynb_checkpoints/quick_start-checkpoint/", "Download a llamafile from HuggingFace": "https://python.langchain.com/v0.2/docs/guides/development/local_llms/", "Note that we set max_retries = 0 to avoid retrying on RateLimits, etc": "https://python.langchain.com/v0.2/docs/guides/productionization/fallbacks/", "Logical Fallacy chain": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/logical_fallacy_chain/", "Define callback handlers by subclassing BaseModerationCallbackHandler": 
"https://python.langchain.com/v0.2/docs/guides/productionization/safety/amazon_comprehend_chain/", "Constitutional chain": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/constitutional_chain/", "QA with private data protection {#qa-with-private-data-protection}": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/qa_privacy_protection/", "Reversible data anonymization with Microsoft Presidio {#reversible-data-anonymization-with-microsoft-presidio}": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/reversible/", "Download model": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/index/", "The prompt was assigned to the evaluator": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/comparison/pairwise_string/", "This is equivalent to loading using the enum": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/string/criteria_eval_chain/", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/v0.2/docs/use_cases/summarization/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/data_generation/", "Make sure the model path is correct for your system!": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/local_retrieval_qa/", "Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/sql/prompting/", "Get embeddings.": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/long_context_reorder/", "Build a sample vectorDB": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/MultiQueryRetriever/", "[Beta] Memory": "https://python.langchain.com/v0.2/docs/modules/memory/.ipynb_checkpoints/index-checkpoint/", "!python -m spacy download en_core_web_lg": "https://python.langchain.com/v0.2/docs/modules/memory/custom_memory/", "adding_memory_chain_multiple_inputs.md": "https://python.langchain.com/v0.2/docs/modules/memory/adding_memory_chain_multiple_inputs/", "adding_memory.md": "https://python.langchain.com/v0.2/docs/modules/memory/adding_memory/", "Combined": "https://python.langchain.com/v0.2/docs/modules/memory/multiple_memory/", "Here it is by default set to \"AI\"": "https://python.langchain.com/v0.2/docs/modules/memory/conversational_customization/", "kg.md": "https://python.langchain.com/v0.2/docs/modules/memory/types/kg/", "Backed by a Vector Store": "https://python.langchain.com/v0.2/docs/modules/memory/types/vectorstore_retriever_memory/", "Callbacks": "https://python.langchain.com/v0.2/docs/modules/callbacks/index/", "this chain will both print to stdout (because verbose=True) and write to 'output.log'": "https://python.langchain.com/v0.2/docs/modules/callbacks/filecallbackhandler/", "Model I/O": "https://python.langchain.com/v0.2/docs/modules/model_io/index/", "Prompts": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/.ipynb_checkpoints/prompts-checkpoint/", "Define your desired data structure.": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/pydantic/", "structured.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/structured/", "csv.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/csv/", "retry.md": 
"https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/retry/", "enum.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/enum/", "datetime.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/datetime/", "Solely for documentation purposes.": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/pandas_dataframe/", "xml.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/xml/", "Select the most similar example to the input.": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/few_shot_examples/", "partial.md": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/partial/", "composition.md": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/composition/", "Quick reference {#quick-reference}": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/quick_start/", "Examples of a pretend task of creating antonyms.": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/example_selectors/similarity/", "index.md": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/example_selectors/index/", "Examples of a fictional translation task.": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/example_selectors/ngram_overlap/", "Prompt templates": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/prompt_templates/.ipynb_checkpoints/index-checkpoint/", "Dynamically route logic based on input {#dynamically-route-logic-based-on-input}": "https://python.langchain.com/v0.2/docs/expression_language/how_to/routing/", "Configure chain internals at runtime {#configure-chain-internals-at-runtime}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/configure/"}, "AzureOpenAIEmbeddings": {"set the environment variables needed for openai package to know to reach out to azure": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/azureopenai/", "Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/.ipynb_checkpoints/microsoft-checkpoint/", "Option 1: use an OpenAI account": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azuresearch/"}, "InfinityEmbeddings": {"Option 1: Use infinity from Python {#option-1-use-infinity-from-python}": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/infinity/", "Infinity": "https://python.langchain.com/v0.2/docs/integrations/providers/infinity/"}, "InfinityEmbeddingsLocal": {"Option 1: Use infinity from Python {#option-1-use-infinity-from-python}": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/infinity/"}, "AwaEmbeddings": {"pip install awadb": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/awadb/", "AwaDB": "https://python.langchain.com/v0.2/docs/integrations/providers/awadb/"}, "VolcanoEmbeddings": {"volcengine.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/volcengine/"}, "MiniMaxEmbeddings": {"minimax.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/minimax/", "Minimax": "https://python.langchain.com/v0.2/docs/integrations/providers/minimax/"}, "FakeEmbeddings": {"fake.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/fake/", "initialize the index": "https://python.langchain.com/v0.2/docs/integrations/retrievers/docarray_retriever/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vectara/", "drop first 
if index already exists": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tair/", "from langchain_community.embeddings.openai import OpenAIEmbeddings": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tencentvectordb/", "# Automatically restart kernel after installs so that your environment can access the new packages": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_memorystore_redis/", "Run tests with shell:": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pgvecto_rs/", "baiduvectordb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/baiduvectordb/"}, "NeMoEmbeddings": {"nemo.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/nemo/"}, "NomicEmbeddings": {"install package": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/nomic/", "nomic.md": "https://python.langchain.com/v0.2/docs/integrations/providers/nomic/"}, "SparkLLMTextEmbeddings": {"sparkllm.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/sparkllm/"}, "PremAIEmbeddings": {"Let's start by doing some imports and define our embedding object": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/premai/"}, "ElasticsearchEmbeddings": {"Define the model ID": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/elasticsearch/", "Elasticsearch": "https://python.langchain.com/v0.2/docs/integrations/providers/elasticsearch/"}, "VoyageAIEmbeddings": {"retrieve the most relevant documents": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/voyageai/", "VoyageAI": "https://python.langchain.com/v0.2/docs/integrations/providers/voyageai/", "OR (depending on Python version)": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/voyageai-reranker/"}, "KNNRetriever": {"retrieve the most relevant documents": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/voyageai/", "knn.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/knn/"}, "SelfHostedEmbeddings": {"For an on-demand A100 with GCP, Azure, or Lambda": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/self-hosted/"}, "SelfHostedHuggingFaceEmbeddings": {"For an on-demand A100 with GCP, Azure, or Lambda": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/self-hosted/"}, "SelfHostedHuggingFaceInstructEmbeddings": {"For an on-demand A100 with GCP, Azure, or Lambda": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/self-hosted/"}, "AnyscaleEmbeddings": {"anyscale.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/anyscale/", "Anyscale": "https://python.langchain.com/v0.2/docs/integrations/providers/anyscale/"}, "EmbaasEmbeddings": {"Set API key": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/embaas/"}, "YandexGPTEmbeddings": {"yandex.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/yandex/"}, "JinaEmbeddings": {"jina.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/jina/", "Jina": "https://python.langchain.com/v0.2/docs/integrations/providers/jina/"}, "AlephAlphaAsymmetricSemanticEmbedding": {"aleph_alpha.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/aleph_alpha/", "Aleph Alpha": "https://python.langchain.com/v0.2/docs/integrations/providers/aleph_alpha/"}, "AlephAlphaSymmetricSemanticEmbedding": {"aleph_alpha.md": 
"https://python.langchain.com/v0.2/docs/integrations/text_embedding/aleph_alpha/", "Aleph Alpha": "https://python.langchain.com/v0.2/docs/integrations/providers/aleph_alpha/"}, "CloudflareWorkersAIEmbeddings": {"single string embeddings": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/cloudflare_workersai/", "Cloudflare": "https://python.langchain.com/v0.2/docs/integrations/providers/cloudflare/"}, "DashScopeEmbeddings": {"dashscope.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/dashscope/", "create DashVector collection": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/dashvector/", "add texts": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/dashvector/"}, "TensorflowHubEmbeddings": {"tensorflowhub.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/tensorflowhub/"}, "LlamafileEmbeddings": {"llamafile setup": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/llamafile/"}, "GradientEmbeddings": {"(demo) compute similarity": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/gradient/", "Gradient": "https://python.langchain.com/v0.2/docs/integrations/providers/gradient/"}, "ModelScopeEmbeddings": {"modelscope_hub.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/modelscope_hub/", "ModelScope": "https://python.langchain.com/v0.2/docs/integrations/providers/modelscope/"}, "SagemakerEndpointEmbeddings": {"client = boto3.client(": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/sagemaker-endpoint/", "AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/"}, "EmbeddingsContentHandler": {"client = boto3.client(": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/sagemaker-endpoint/"}, "DocArrayInMemorySearch": {"async embed query": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/upstage/", "Get an OpenAI token: https://platform.openai.com/account/api-keys": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/docarray_in_memory/", "> ChatPromptValue(messages=[HumanMessage(content='tell me a short joke about ice cream')])": "https://python.langchain.com/v0.2/docs/expression_language/get_started/"}, "OpenVINOEmbeddings": {"openvino.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/openvino/", "Helper function for printing docs": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/openvino_rerank/"}, "OpenVINOBgeEmbeddings": {"openvino.md": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/openvino/"}, "NVIDIAEmbeddings": {"del os.environ['NVIDIA_API_KEY'] ## delete key and reset": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/nvidia_ai_endpoints/", "NVIDIA": "https://python.langchain.com/v0.2/docs/integrations/providers/nvidia/"}, "FAISS": {"del os.environ['NVIDIA_API_KEY'] ## delete key and reset": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/nvidia_ai_endpoints/", "fleet_context.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/fleet_context/", "Helper function for printing docs": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/contextual_compression/", "OR (depending on Python version)": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/cross_encoder_reranker/", "1. 
Vanilla RAG {#vanilla-rag-1}": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "ragatouille.md": "https://python.langchain.com/v0.2/docs/integrations/providers/ragatouille/", "Facebook - Meta": "https://python.langchain.com/v0.2/docs/integrations/providers/facebook/", "document_comparison_toolkit.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/document_comparison_toolkit/", "OR": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/faiss_async/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/faiss/", "set a flag to switch between local and remote parsing": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/youtube_audio/", "use default authN method API-key": "https://python.langchain.com/v0.2/docs/integrations/llms/oci_generative_ai/", "Quickstart": "https://python.langchain.com/v0.2/docs/get_started/quickstart/", "QA with private data protection {#qa-with-private-data-protection}": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/qa_privacy_protection/", "conversational_retrieval_agents.md": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/conversational_retrieval_agents/", "Download the content": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/handle_long_text/", "Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/sql/prompting/", "os.environ[\"OPENAI_API_KEY\"] = getpass.getpass()": "https://python.langchain.com/v0.2/docs/use_cases/sql/large_db/", "Swapping the `ByteStore` {#swapping-the-bytestore}": "https://python.langchain.com/v0.2/docs/modules/data_connection/text_embedding/caching_embeddings/", "initialize the bm25 retriever and faiss retriever": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/ensemble/", "Define your embedding model": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/time_weighted_vectorstore/", "vectorstore.md": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/vectorstore/", "Vector stores": "https://python.langchain.com/v0.2/docs/modules/data_connection/vectorstores/index/", "PDF": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/pdf/", "Backed by a Vector Store": "https://python.langchain.com/v0.2/docs/modules/memory/types/vectorstore_retriever_memory/", "Quickstart {#quickstart}": "https://python.langchain.com/v0.2/docs/modules/agents/quick_start/", "Examples of a pretend task of creating antonyms.": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/example_selectors/mmr/", "Streaming With LangChain {#streaming-with-langchain}": "https://python.langchain.com/v0.2/docs/expression_language/streaming/", "The input schema of the chain is the input schema of its first part, the prompt.": "https://python.langchain.com/v0.2/docs/expression_language/interface/", "inspect.md": "https://python.langchain.com/v0.2/docs/expression_language/how_to/inspect/", "Adding values to chain state {#adding-values-to-chain-state}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/assign/", "Formatting inputs & output {#formatting-inputs-output}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/parallel/", "Passing data through {#passing-data-through}": 
"https://python.langchain.com/v0.2/docs/expression_language/primitives/passthrough/"}, "RunnablePassthrough": {"del os.environ['NVIDIA_API_KEY'] ## delete key and reset": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/nvidia_ai_endpoints/", "For use in Chaining section": "https://python.langchain.com/v0.2/docs/integrations/retrievers/you-retriever/", "fleet_context.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/fleet_context/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/tool_error_handling/", "and some deps for this notebook": "https://python.langchain.com/v0.2/docs/integrations/tools/exa_search/", "1. Vanilla RAG {#vanilla-rag-1}": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "From LangChain, import standard modules for prompting.": "https://python.langchain.com/v0.2/docs/integrations/providers/dspy/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_summary/", "install package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/weaviate/", "cosine: distance metric": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/jaguar/", "astradb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/astradb/", "cassandra.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/cassandra/", "use default authN method API-key": "https://python.langchain.com/v0.2/docs/integrations/llms/oci_generative_ai/", "install the opaqueprompts and langchain packages": "https://python.langchain.com/v0.2/docs/integrations/llms/opaqueprompts/", "QA with private data protection {#qa-with-private-data-protection}": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/qa_privacy_protection/", "If you'd like to use LangSmith, uncomment the below:": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/prompting/", "This will only get documents for Ankush": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/per_user/", "import dotenv": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/streaming/", "Make sure the model path is correct for your system!": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/local_retrieval_qa/", "Uncomment if you want to log to LangSmith": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/citations/", "Quickstart": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/.ipynb_checkpoints/quickstart-checkpoint/", "%pip install -qU langchain langchain-community langchain-openai youtube-transcript-api pytube langchain-chroma": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/quickstart/", "%pip install -qU langchain langchain-community langchain-openai langchain-chroma": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/no_queries/", "%pip install -qU langchain-core langchain-openai": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/techniques/step_back/", "%pip install -qU langchain langchain-community langchain-openai faker langchain-chroma": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/high_cardinality/", "%pip install -qU langchain langchain-openai": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/techniques/hyde/", "Uncomment the below to use LangSmith. 
Not required.": "https://python.langchain.com/v0.2/docs/use_cases/sql/quickstart/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/chatbots/memory_management/", "os.environ[\"OPENAI_API_KEY\"] = getpass.getpass()": "https://python.langchain.com/v0.2/docs/use_cases/sql/large_db/", "Retrievers": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/index/", "Tool calling {#tool-calling}": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/function_calling/", "Streaming With LangChain {#streaming-with-langchain}": "https://python.langchain.com/v0.2/docs/expression_language/streaming/", "batch_configurable_chain([\"ice cream\", \"spaghetti\", \"dumplings\"])": "https://python.langchain.com/v0.2/docs/expression_language/why/", "> ChatPromptValue(messages=[HumanMessage(content='tell me a short joke about ice cream')])": "https://python.langchain.com/v0.2/docs/expression_language/get_started/", "The input schema of the chain is the input schema of its first part, the prompt.": "https://python.langchain.com/v0.2/docs/expression_language/interface/", "prompt_llm_parser.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/prompt_llm_parser/", "multiple_chains.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/multiple_chains/", "Dynamically route logic based on input {#dynamically-route-logic-based-on-input}": "https://python.langchain.com/v0.2/docs/expression_language/how_to/routing/", "inspect.md": "https://python.langchain.com/v0.2/docs/expression_language/how_to/inspect/", "Adding values to chain state {#adding-values-to-chain-state}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/assign/", "Binding: Attach runtime args {#binding-attach-runtime-args}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/binding/", "Formatting inputs & output {#formatting-inputs-output}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/parallel/", "Passing data through {#passing-data-through}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/passthrough/"}, "ChatNVIDIA": {"del os.environ['NVIDIA_API_KEY'] ## delete key and reset": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/nvidia_ai_endpoints/", "Or via the async API": "https://python.langchain.com/v0.2/docs/integrations/chat/nvidia_ai_endpoints/", "NVIDIA": "https://python.langchain.com/v0.2/docs/integrations/providers/nvidia/"}, "LocalAIEmbeddings": {"if you are behind an explicit proxy, you can use the OPENAI_PROXY environment variable to pass through": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/localai/"}, "AzureAISearchRetriever": {"azure_ai_search.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/azure_ai_search/", "Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/"}, "DirectoryLoader": {"azure_ai_search.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/azure_ai_search/", "load text splitter and split docs into snippets of text": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/starrocks/", "File Directory": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/file_directory/"}, "TextLoader": {"azure_ai_search.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/azure_ai_search/", "cosine: distance metric": 
"https://python.langchain.com/v0.2/docs/integrations/vectorstores/jaguar/", "Helper function for printing docs": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/contextual_compression/", "OR (depending on Python version)": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/cross_encoder_reranker/", "Establishing a connection to the database is facilitated through the singlestoredb Python connector.": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/singlestoredb/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/momento_vector_index/", "Here we want to make sure the answer is minimally relevant": "https://python.langchain.com/v0.2/docs/integrations/callbacks/confident/", "1. Vanilla RAG {#vanilla-rag-1}": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "VDMS": "https://python.langchain.com/v0.2/docs/integrations/providers/vdms/", "lancedb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/lancedb/", "Pip install necessary package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vdms/", "You need to install sqlite-vss as a dependency.": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sqlitevss/", "Refresh is required for server use": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vald/", "install package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/weaviate/", "add texts": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/dashvector/", "databricks_vector_search.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/databricks_vector_search/", "scann.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/scann/", "xata.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/xata/", "openai": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/hippo/", "docs[0].metadata[\"id\"] == \"id:testapp:testapp::32\"": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vespa/", "output length: 4": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/rockset/", "or install latest:": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/dingo/", "replace": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/zilliz/", "Set up the OpenAI Environment Variables": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azure_cosmos_db/", "vikingdb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vikingdb/", "default metric is angular": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/annoy/", "Wait until the cluster is ready for use.": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/couchbase/", "typesense.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/typesense/", "Here we useimport getpass": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tidb_vector/", "atlas.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/atlas/", "or shorter": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/activeloop_deeplake/", "Load the document and split it into chunks": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vlite/", "Pip install necessary package {#pip-install-necessary-package}": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/lantern/", "drop first if index already exists": 
"https://python.langchain.com/v0.2/docs/integrations/vectorstores/tair/", "import": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/chroma/", "duckdb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/duckdb/", "for example": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/alibabacloud_opensearch/", "Dependencies {#dependencies}": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/clarifai/", "# if you plan to use bson serialization, install also:": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sklearn/", "from langchain_community.embeddings.openai import OpenAIEmbeddings": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tencentvectordb/", "Get an OpenAI token: https://platform.openai.com/account/api-keys": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/docarray_in_memory/", "use directly a `where_str` to delete": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/myscale/", "tiledb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tiledb/", "clickhouse.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/clickhouse/", "# Automatically restart kernel after installs so that your environment can access the new packages": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_memorystore_redis/", "qdrant.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/qdrant/", "tigris.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tigris/", "ecloud_vector_search.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/ecloud_vector_search/", "Create a bes instance and index docs.": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/baiducloud_vector_search/", "awadb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/awadb/", "with pip": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/supabase/", "%pip install --upgrade --quiet surrealdb langchain langchain-community": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/surrealdb/", "If using the default Docker installation, use this instantiation instead:": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/opensearch/", "pinecone.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pinecone/", "OR": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vearch/", "create cluster and add texts": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/bageldb/", "Option 1: use an OpenAI account": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azuresearch/", "usearch.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/usearch/", "This will only get documents for Ankush": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/milvus/", "Metadata {#metadata}": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/elasticsearch/", "Run tests with shell:": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pgvecto_rs/", "initialize marqo": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/marqo/", "Pip install necessary packages": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/timescalevector/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/faiss/", "epsilla.md": 
"https://python.langchain.com/v0.2/docs/integrations/vectorstores/epsilla/", "DocumentDB connection string": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/documentdb/", "Create collection if running for the first time. If the collection": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/semadb/", "analyticdb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/analyticdb/", "hologres.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/hologres/", "baiduvectordb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/baiduvectordb/", "Use Meilisearch vector store to store texts & associated embeddings as vector": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/meilisearch/", "conversational_retrieval_agents.md": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/conversational_retrieval_agents/", "Swapping the `ByteStore` {#swapping-the-bytestore}": "https://python.langchain.com/v0.2/docs/modules/data_connection/text_embedding/caching_embeddings/", "The vectorstore to use to index the child chunks": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/multi_vector/", "This text splitter is used to create the child documents": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/parent_document_retriever/", "vectorstore.md": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/vectorstore/", "Retrievers": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/.ipynb_checkpoints/index-checkpoint/", "Vector stores": "https://python.langchain.com/v0.2/docs/modules/data_connection/vectorstores/index/", "Document loaders": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/index/", "File Directory": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/file_directory/", "Load in document to retrieve over": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/agent_structured/"}, "TokenTextSplitter": {"azure_ai_search.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/azure_ai_search/", "load text splitter and split docs into snippets of text": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/starrocks/", "Download the content": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/handle_long_text/", "This is a long document we can split up.": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_transformers/split_by_token/"}, "AzureSearch": {"azure_ai_search.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/azure_ai_search/", "Option 1: use an OpenAI account": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azuresearch/"}, "RePhraseQueryRetriever": {"re_phrase.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/re_phrase/"}, "WebBaseLoader": {"re_phrase.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/re_phrase/", "Install necessary dependencies.": "https://python.langchain.com/v0.2/docs/integrations/callbacks/infino/", "Collection config is needed if we're creating a new Zep Collection": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/zep/", "merge_doc.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/merge_doc/", "Use this piece of code for testing new custom BeautifulSoup parsers": 
"https://python.langchain.com/v0.2/docs/integrations/document_loaders/web_base/", "Quickstart": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/.ipynb_checkpoints/quickstart-checkpoint/", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/v0.2/docs/use_cases/summarization/", "import dotenv": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/streaming/", "Make sure the model path is correct for your system!": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/local_retrieval_qa/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/chatbots/quickstart/", "Build a sample vectorDB": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/MultiQueryRetriever/", "Quickstart {#quickstart}": "https://python.langchain.com/v0.2/docs/modules/agents/quick_start/"}, "RecursiveCharacterTextSplitter": {"re_phrase.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/re_phrase/", "Helper function for printing docs": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/openvino_rerank/", "OR (depending on Python version)": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/cross_encoder_reranker/", "# activeloop token is needed if you are not signed in using CLI: `activeloop login -u -p `": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "Loading the COMVEST 2024 notice": "https://python.langchain.com/v0.2/docs/integrations/chat/maritalk/", "1. Vanilla RAG {#vanilla-rag-1}": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "ragatouille.md": "https://python.langchain.com/v0.2/docs/integrations/providers/ragatouille/", "Install all needed libraries": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/yellowbrick/", "TODO : Set values as per your requirements": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_vertex_ai_vector_search/", "vikingdb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vikingdb/", "astradb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/astradb/", "cassandra.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/cassandra/", "Collection config is needed if we're creating a new Zep Collection": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/zep/", "OR": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vearch/", "initialize MongoDB python client": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/mongodb_atlas/", "set a flag to switch between local and remote parsing": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/youtube_audio/", "Code for: class MyClass:": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/source_code/", "Quickstart": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/.ipynb_checkpoints/quickstart-checkpoint/", "QA with private data protection {#qa-with-private-data-protection}": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/qa_privacy_protection/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/chatbots/quickstart/", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/v0.2/docs/use_cases/code_understanding/", "import dotenv": 
"https://python.langchain.com/v0.2/docs/use_cases/question_answering/streaming/", "Make sure the model path is correct for your system!": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/local_retrieval_qa/", "Uncomment if you want to log to LangSmith": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/citations/", "%pip install -qU langchain langchain-community langchain-openai youtube-transcript-api pytube langchain-chroma": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/quickstart/", "%pip install -qU langchain langchain-community langchain-openai langchain-chroma": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/no_queries/", "The vectorstore to use to index the child chunks": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/multi_vector/", "Build a sample vectorDB": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/MultiQueryRetriever/", "This text splitter is used to create the child documents": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/parent_document_retriever/", "Full list of supported languages": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_transformers/code_splitter/", "This is a long document we can split up.": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_transformers/split_by_token/", "for local file use html_splitter.split_text_from_file()": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_transformers/HTML_header_metadata/", "MD splits": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_transformers/markdown_header_metadata/", "Split": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_transformers/HTML_section_aware_splitter/", "Quickstart {#quickstart}": "https://python.langchain.com/v0.2/docs/modules/agents/quick_start/", "Load in document to retrieve over": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/agent_structured/"}, "YouSearchAPIWrapper": {"For use in Chaining section": "https://python.langchain.com/v0.2/docs/integrations/tools/you/"}, "YouRetriever": {"For use in Chaining section": "https://python.langchain.com/v0.2/docs/integrations/retrievers/you-retriever/"}, "Jaguar": {"cosine: distance metric": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/jaguar/", "Jaguar": "https://python.langchain.com/v0.2/docs/integrations/providers/jaguar/"}, "CharacterTextSplitter": {"cosine: distance metric": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/jaguar/", "Establishing a connection to the database is facilitated through the singlestoredb Python connector.": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/singlestoredb/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/momento_vector_index/", "OpenAI": "https://python.langchain.com/v0.2/docs/integrations/platforms/openai/", "Here we want to make sure the answer is minimally relevant": "https://python.langchain.com/v0.2/docs/integrations/callbacks/confident/", "VDMS": "https://python.langchain.com/v0.2/docs/integrations/providers/vdms/", "document_comparison_toolkit.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/document_comparison_toolkit/", "lancedb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/lancedb/", "Pip install necessary package": 
"https://python.langchain.com/v0.2/docs/integrations/vectorstores/vdms/", "You need to install sqlite-vss as a dependency.": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sqlitevss/", "Refresh is required for server use": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vald/", "install package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/weaviate/", "add texts": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/dashvector/", "databricks_vector_search.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/databricks_vector_search/", "scann.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/scann/", "xata.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/xata/", "openai": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/hippo/", "docs[0].metadata[\"id\"] == \"id:testapp:testapp::32\"": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vespa/", "output length: 4": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/rockset/", "or install latest:": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/dingo/", "replace": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/zilliz/", "Set up the OpenAI Environment Variables": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azure_cosmos_db/", "default metric is angular": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/annoy/", "Wait until the cluster is ready for use.": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/couchbase/", "typesense.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/typesense/", "Here we useimport getpass": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tidb_vector/", "or shorter": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/activeloop_deeplake/", "Load the document and split it into chunks": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vlite/", "Pip install necessary package {#pip-install-necessary-package}": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/lantern/", "drop first if index already exists": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tair/", "import": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/chroma/", "duckdb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/duckdb/", "for example": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/alibabacloud_opensearch/", "Dependencies {#dependencies}": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/clarifai/", "# if you plan to use bson serialization, install also:": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sklearn/", "from langchain_community.embeddings.openai import OpenAIEmbeddings": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tencentvectordb/", "Get an OpenAI token: https://platform.openai.com/account/api-keys": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/docarray_in_memory/", "use directly a `where_str` to delete": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/myscale/", "tiledb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tiledb/", "clickhouse.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/clickhouse/", "# Automatically restart kernel after installs so that your 
environment can access the new packages": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_memorystore_redis/", "qdrant.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/qdrant/", "tigris.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tigris/", "ecloud_vector_search.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/ecloud_vector_search/", "Create a bes instance and index docs.": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/baiducloud_vector_search/", "awadb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/awadb/", "with pip": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/supabase/", "%pip install --upgrade --quiet surrealdb langchain langchain-community": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/surrealdb/", "If using the default Docker installation, use this instantiation instead:": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/opensearch/", "pinecone.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pinecone/", "OR": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/faiss_async/", "create cluster and add texts": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/bageldb/", "Option 1: use an OpenAI account": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azuresearch/", "usearch.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/usearch/", "This will only get documents for Ankush": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/milvus/", "Metadata {#metadata}": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/elasticsearch/", "Run tests with shell:": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pgvecto_rs/", "initialize marqo": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/marqo/", "Pip install necessary packages": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/timescalevector/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/faiss/", "epsilla.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/epsilla/", "DocumentDB connection string": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/documentdb/", "Create collection if running for the first time. 
If the collection": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/semadb/", "analyticdb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/analyticdb/", "hologres.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/hologres/", "baiduvectordb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/baiduvectordb/", "Use Meilisearch vector store to store texts & associated embeddings as vector": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/meilisearch/", "Uncomment this to install psychicapi if you don't already have it installed": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/psychic/", "Map reduce example": "https://python.langchain.com/v0.2/docs/integrations/llms/manifest/", "To make the caching really obvious, lets use a slower model.": "https://python.langchain.com/v0.2/docs/integrations/llms/llm_caching/", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/v0.2/docs/use_cases/summarization/", "conversational_retrieval_agents.md": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/conversational_retrieval_agents/", "Download the content": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/handle_long_text/", "indexing.md": "https://python.langchain.com/v0.2/docs/modules/data_connection/indexing/", "Swapping the `ByteStore` {#swapping-the-bytestore}": "https://python.langchain.com/v0.2/docs/modules/data_connection/text_embedding/caching_embeddings/", "Helper function for printing docs": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/contextual_compression/", "vectorstore.md": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/vectorstore/", "Retrievers": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/.ipynb_checkpoints/index-checkpoint/", "Vector stores": "https://python.langchain.com/v0.2/docs/modules/data_connection/vectorstores/index/", "This is a long document we can split up.": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_transformers/split_by_token/", "adding_memory_chain_multiple_inputs.md": "https://python.langchain.com/v0.2/docs/modules/memory/adding_memory_chain_multiple_inputs/"}, "MultiVectorRetriever": {"fleet_context.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/fleet_context/", "You need the dgml-utils package to use the DocugamiLoader (run pip install directly without \"poetry run\" if you are not using poetry)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/docugami/", "The vectorstore to use to index the child chunks": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/multi_vector/"}, "Document": {"fleet_context.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/fleet_context/", "STEP 1: Load": "https://python.langchain.com/v0.2/docs/integrations/retrievers/chatgpt-plugin/", "cohere.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/cohere/", "client.schema.delete_all()": "https://python.langchain.com/v0.2/docs/integrations/retrievers/weaviate-hybrid/", "bm25.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/bm25/", "Create a retriever with a demo encoder": "https://python.langchain.com/v0.2/docs/integrations/retrievers/qdrant-sparse/", "elasticsearch_retriever.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/elasticsearch_retriever/", 
"tf_idf.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/tf_idf/", "This example only specifies a relevant query": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/mongodb_atlas/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/vectara_self_query/", "create DashVector collection": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/dashvector/", "or install latest:": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/dingo/", "Get openAI api key by reading local .env file": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/timescalevector_self_query/", "create new index": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/pinecone/", "in case if some queries fail consider installing libdeeplake manually": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/activeloop_deeplake_self_query/", "import os": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/qdrant_self_query/", "apify.md": "https://python.langchain.com/v0.2/docs/integrations/tools/apify/", "Cohere": "https://python.langchain.com/v0.2/docs/integrations/providers/cohere/", "Pip install necessary package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pgembedding/", "Install all needed libraries": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/yellowbrick/", "See docker command above to launch a postgres instance with pgvector enabled.": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pgvector/", "default metric is angular": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/annoy/", "Pip install necessary package {#pip-install-necessary-package}": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/lantern/", "@markdown Please specify a source for demo purpose.": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_firestore/", "from langchain_community.embeddings.openai import OpenAIEmbeddings": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tencentvectordb/", "astradb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/astradb/", "OR": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/faiss_async/", "cassandra.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/cassandra/", "This will only get documents for Ankush": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/milvus/", "Metadata {#metadata}": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/elasticsearch/", "Run tests with shell:": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pgvecto_rs/", "Pip install necessary packages": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/timescalevector/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/faiss/", "nuclia_transformer.md": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/nuclia_transformer/", "ai21_semantic_text_splitter.md": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/ai21_semantic_text_splitter/", "Must be an OpenAI model that supports functions": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/openai_metadata_tagger/", "doctran_extract_properties.md": 
"https://python.langchain.com/v0.2/docs/integrations/document_transformers/doctran_extract_properties/", "google_translate.md": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/google_translate/", "doctran_interrogate_document.md": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/doctran_interrogate_document/", "doctran_translate_document.md": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/doctran_translate_document/", "Feature structure of `mlqa/en` dataset:": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/tensorflow_datasets/", "@markdown Please fill in the both the Google Cloud region and name of your Cloud SQL instance.": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_cloud_sql_mssql/", "airbyte_salesforce.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_salesforce/", "airbyte_cdk.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_cdk/", "airbyte_stripe.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_stripe/", "copypaste.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/copypaste/", "airbyte_typeform.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_typeform/", "apify_dataset.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/apify_dataset/", "# Automatically restart kernel after installs so that your environment can access the new packages": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_datastore/", "You need the dgml-utils package to use the DocugamiLoader (run pip install directly without \"poetry run\" if you are not using poetry)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/docugami/", "airbyte_hubspot.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_hubspot/", "airbyte_gong.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_gong/", "@markdown Please specify an endpoint associated with the instance and a key prefix for demo purpose.": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_memorystore_redis/", "@markdown Please specify an instance and a table for demo purpose.": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_bigtable/", "@title Set Your Values Here { display-mode: \"form\" }": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_el_carro/", "airbyte_shopify.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_shopify/", "airbyte_zendesk_support.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_zendesk_support/", "@markdown Please specify an instance id, a database, and a table for demo purpose.": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_spanner/", "The underlying python-gremlin has a problem when running in notebook": "https://python.langchain.com/v0.2/docs/integrations/graphs/azure_cosmosdb_gremlin/", "sagemaker.md": "https://python.langchain.com/v0.2/docs/integrations/llms/sagemaker/", "To make the caching really obvious, lets use a slower model.": "https://python.langchain.com/v0.2/docs/integrations/llms/llm_caching/", "Quickstart": "https://python.langchain.com/v0.2/docs/get_started/quickstart/", "QA with private data protection 
{#qa-with-private-data-protection}": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/qa_privacy_protection/", "self-query-qdrant": "https://python.langchain.com/v0.2/docs/templates/self-query-qdrant/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/web_scraping/", "Uncomment if you want to log to LangSmith": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/citations/", "%pip install -qU langchain langchain-community langchain-openai youtube-transcript-api pytube langchain-chroma": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/quickstart/", "Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/graph/constructing/", "Download the content": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/handle_long_text/", "indexing.md": "https://python.langchain.com/v0.2/docs/modules/data_connection/indexing/", "The vectorstore to use to index the child chunks": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/multi_vector/", "Custom Retriever {#custom-retriever}": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/custom_retriever/", "Define your embedding model": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/time_weighted_vectorstore/", "This example only specifies a filter": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/self_query/", "Retrievers": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/.ipynb_checkpoints/index-checkpoint/", "Self-querying": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/.ipynb_checkpoints/self_query-checkpoint/", "Custom Document Loader {#custom-document-loader}": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/custom/", "PDF": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/pdf/"}, "BaseStore": {"fleet_context.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/fleet_context/"}, "VectorStore": {"fleet_context.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/fleet_context/"}, "InMemoryStore": {"fleet_context.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/fleet_context/", "You need the dgml-utils package to use the DocugamiLoader (run pip install directly without \"poetry run\" if you are not using poetry)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/docugami/", "This text splitter is used to create the child documents": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/parent_document_retriever/"}, "ContextualCompressionRetriever": {"Helper function for printing docs": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/contextual_compression/", "OR (depending on Python version)": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/cross_encoder_reranker/", "Get 3 diff embeddings.": "https://python.langchain.com/v0.2/docs/integrations/retrievers/merger_retriever/", "1. 
Vanilla RAG {#vanilla-rag-1}": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "ragatouille.md": "https://python.langchain.com/v0.2/docs/integrations/providers/ragatouille/"}, "LLMLinguaCompressor": {"Helper function for printing docs": "https://python.langchain.com/v0.2/docs/integrations/retrievers/llmlingua/"}, "RetrievalQA": {"Helper function for printing docs": "https://python.langchain.com/v0.2/docs/integrations/retrievers/llmlingua/", "bedrock.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/bedrock/", "OR (depending on Python version)": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/voyageai-reranker/", "# activeloop token is needed if you are not signed in using CLI: `activeloop login -u -p `": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "Here we want to make sure the answer is minimally relevant": "https://python.langchain.com/v0.2/docs/integrations/callbacks/confident/", "1. Vanilla RAG {#vanilla-rag-1}": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "document_comparison_toolkit.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/document_comparison_toolkit/", "load text splitter and split docs into snippets of text": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/starrocks/", "scann.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/scann/", "TODO : Set values as per your requirements": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_vertex_ai_vector_search/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/momento_vector_index/", "or shorter": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/activeloop_deeplake/", "Clean up KDB.AI \"documents\" table and index for similarity search": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/kdbai/", "Pip install necessary packages": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/timescalevector/", "DocumentDB connection string": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/documentdb/", "initialize MongoDB python client": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/mongodb_atlas/", "set a flag to switch between local and remote parsing": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/youtube_audio/", "You need the dgml-utils package to use the DocugamiLoader (run pip install directly without \"poetry run\" if you are not using poetry)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/docugami/", "Retrievers": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/.ipynb_checkpoints/index-checkpoint/"}, "ElasticSearchBM25Retriever": {"Alternatively, you can load an existing index": "https://python.langchain.com/v0.2/docs/integrations/retrievers/elastic_search_bm25/"}, "OutlineRetriever": {"outline.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/outline/", "Outline": "https://python.langchain.com/v0.2/docs/integrations/providers/outline/"}, "ConversationalRetrievalChain": {"outline.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/outline/", "get a token: https://platform.openai.com/account/api-keys": "https://python.langchain.com/v0.2/docs/integrations/retrievers/arxiv/", "Setup API keys for Kay and OpenAI": "https://python.langchain.com/v0.2/docs/integrations/retrievers/sec_filings/", "Setup 
API key": "https://python.langchain.com/v0.2/docs/integrations/retrievers/kay/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_chat/", "Pip install necessary package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sap_hanavector/"}, "ZepMemory": {"Set this to your Zep server URL": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory/", "Zep": "https://python.langchain.com/v0.2/docs/integrations/providers/zep/"}, "SearchScope": {"Set this to your Zep server URL": "https://python.langchain.com/v0.2/docs/integrations/retrievers/zep_memorystore/"}, "SearchType": {"Set this to your Zep server URL": "https://python.langchain.com/v0.2/docs/integrations/retrievers/zep_memorystore/", "You need the dgml-utils package to use the DocugamiLoader (run pip install directly without \"poetry run\" if you are not using poetry)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/docugami/", "The vectorstore to use to index the child chunks": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/multi_vector/"}, "ZepRetriever": {"Set this to your Zep server URL": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory/", "Zep": "https://python.langchain.com/v0.2/docs/integrations/providers/zep/"}, "VespaRetriever": {"vespa.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/vespa/", "Vespa": "https://python.langchain.com/v0.2/docs/integrations/providers/vespa/"}, "AmazonKendraRetriever": {"amazon_kendra_retriever.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/amazon_kendra_retriever/"}, "AmazonKnowledgeBasesRetriever": {"bedrock.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/bedrock/"}, "Bedrock": {"bedrock.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/bedrock/", "Guardrails for Amazon Bedrock with trace": "https://python.langchain.com/v0.2/docs/integrations/llms/bedrock/"}, "CohereRerank": {"OR (depending on Python version)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/cohere-reranker/"}, "Cohere": {"OR (depending on Python version)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/cohere-reranker/", "Cohere": "https://python.langchain.com/v0.2/docs/integrations/providers/cohere/", "self-query-qdrant": "https://python.langchain.com/v0.2/docs/templates/self-query-qdrant/"}, "NeuralDBRetriever": {"From scratch": "https://python.langchain.com/v0.2/docs/integrations/retrievers/thirdai_neuraldb/"}, "SingleStoreDB": {"Establishing a connection to the database is facilitated through the singlestoredb Python connector.": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/singlestoredb/", "SingleStoreDB": "https://python.langchain.com/v0.2/docs/integrations/providers/singlestoredb/"}, "WikipediaRetriever": {"get a token: https://platform.openai.com/account/api-keys": "https://python.langchain.com/v0.2/docs/integrations/retrievers/wikipedia/", "Wikipedia": "https://python.langchain.com/v0.2/docs/integrations/providers/wikipedia/", "Uncomment if you want to log to LangSmith": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/citations/"}, "MetalRetriever": {"metal.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/metal/", "Metal": "https://python.langchain.com/v0.2/docs/integrations/providers/metal/"}, "BreebsRetriever": {"breebs.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/breebs/", 
"Breebs (Open Knowledge)": "https://python.langchain.com/v0.2/docs/integrations/providers/breebs/"}, "CSVLoader": {"STEP 1: Load": "https://python.langchain.com/v0.2/docs/integrations/retrievers/chatgpt-plugin/", "csv.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/csv/", "pebblo.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/pebblo/", "CSV": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/csv/"}, "ChatGPTPluginRetriever": {"STEP 1: Load": "https://python.langchain.com/v0.2/docs/integrations/retrievers/chatgpt-plugin/", "OpenAI": "https://python.langchain.com/v0.2/docs/integrations/platforms/openai/"}, "KayAiRetriever": {"Setup API keys for Kay and OpenAI": "https://python.langchain.com/v0.2/docs/integrations/retrievers/sec_filings/", "Setup API key": "https://python.langchain.com/v0.2/docs/integrations/retrievers/kay/"}, "ChatCohere": {"cohere.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/cohere/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/integrations/chat/cohere/", "Cohere": "https://python.langchain.com/v0.2/docs/integrations/providers/cohere/", "Quickstart": "https://python.langchain.com/v0.2/docs/modules/model_io/quick_start/", "Model I/O": "https://python.langchain.com/v0.2/docs/modules/model_io/index/"}, "CohereRagRetriever": {"cohere.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/cohere/", "Cohere": "https://python.langchain.com/v0.2/docs/integrations/providers/cohere/"}, "DriaRetriever": {"Installation {#installation}": "https://python.langchain.com/v0.2/docs/integrations/retrievers/dria_index/"}, "DocArrayRetriever": {"initialize the index": "https://python.langchain.com/v0.2/docs/integrations/retrievers/docarray_retriever/"}, "SVMRetriever": {"svm.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/svm/"}, "TavilySearchAPIRetriever": {"os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/integrations/retrievers/tavily/"}, "PineconeHybridSearchRetriever": {"create the index": "https://python.langchain.com/v0.2/docs/integrations/retrievers/pinecone_hybrid_search/", "Pinecone": "https://python.langchain.com/v0.2/docs/integrations/providers/pinecone/"}, "DeepLake": {"# activeloop token is needed if you are not signed in using CLI: `activeloop login -u -p `": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "in case if some queries fail consider installing libdeeplake manually": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/activeloop_deeplake_self_query/", "Activeloop Deep Lake": "https://python.langchain.com/v0.2/docs/integrations/providers/activeloop_deeplake/", "or shorter": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/activeloop_deeplake/"}, "AsyncHtmlLoader": {"# activeloop token is needed if you are not signed in using CLI: `activeloop login -u -p `": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "html2text.md": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/html2text/", "async_html.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/async_html/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/web_scraping/"}, "Html2TextTransformer": {"# activeloop token is needed if you are not signed in using CLI: `activeloop login -u -p `": 
"https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "html2text.md": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/html2text/", "async_chromium.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/async_chromium/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/web_scraping/"}, "create_structured_output_chain": {"# activeloop token is needed if you are not signed in using CLI: `activeloop login -u -p `": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/graph/mapping/"}, "HumanMessagePromptTemplate": {"# activeloop token is needed if you are not signed in using CLI: `activeloop login -u -p `": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "get a chat completion from the formatted messages": "https://python.langchain.com/v0.2/docs/integrations/chat/vllm/", "!pip3 install text-generation": "https://python.langchain.com/v0.2/docs/integrations/chat/llama2_chat/", "context.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/context/", "sql_database.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/sql_database/", "Install all needed libraries": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/yellowbrick/", "see https://python.langchain.com/en/latest/modules/data_connection/getting_started.html for more details": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/figma/", "[Beta] Memory": "https://python.langchain.com/v0.2/docs/modules/memory/.ipynb_checkpoints/index-checkpoint/", "adding_memory.md": "https://python.langchain.com/v0.2/docs/modules/memory/adding_memory/", "Prompts": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/.ipynb_checkpoints/prompts-checkpoint/", "Quick reference {#quick-reference}": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/quick_start/", "Prompt templates": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/prompt_templates/.ipynb_checkpoints/index-checkpoint/"}, "PubMedRetriever": {"pubmed.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/pubmed/", "PubMed": "https://python.langchain.com/v0.2/docs/integrations/providers/pubmed/"}, "WeaviateHybridSearchRetriever": {"client.schema.delete_all()": "https://python.langchain.com/v0.2/docs/integrations/retrievers/weaviate-hybrid/"}, "EmbedchainRetriever": {"Installation {#installation}": "https://python.langchain.com/v0.2/docs/integrations/retrievers/embedchain/"}, "create_retrieval_chain": {"ragatouille.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/ragatouille/", "Quickstart": "https://python.langchain.com/v0.2/docs/get_started/quickstart/", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/v0.2/docs/use_cases/code_understanding/", "import dotenv": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/chat_history/"}, "create_stuff_documents_chain": {"ragatouille.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/ragatouille/", "Quickstart": "https://python.langchain.com/v0.2/docs/get_started/quickstart/", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/v0.2/docs/use_cases/code_understanding/", "import dotenv": 
"https://python.langchain.com/v0.2/docs/use_cases/question_answering/chat_history/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/chatbots/quickstart/"}, "ArxivRetriever": {"get a token: https://platform.openai.com/account/api-keys": "https://python.langchain.com/v0.2/docs/integrations/retrievers/arxiv/", "Arxiv": "https://python.langchain.com/v0.2/docs/integrations/providers/arxiv/"}, "BM25Retriever": {"bm25.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/bm25/", "Loading the COMVEST 2024 notice": "https://python.langchain.com/v0.2/docs/integrations/chat/maritalk/", "initialize the bm25 retriever and faiss retriever": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/ensemble/"}, "QdrantSparseVectorRetriever": {"Create a retriever with a demo encoder": "https://python.langchain.com/v0.2/docs/integrations/retrievers/qdrant-sparse/"}, "DeterministicFakeEmbedding": {"elasticsearch_retriever.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/elasticsearch_retriever/"}, "Embeddings": {"elasticsearch_retriever.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/elasticsearch_retriever/", "Ensure that all we need is installed": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/infinispanvs/"}, "ElasticsearchRetriever": {"elasticsearch_retriever.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/elasticsearch_retriever/"}, "ArceeRetriever": {"Define filters": "https://python.langchain.com/v0.2/docs/integrations/retrievers/arcee/", "Arcee": "https://python.langchain.com/v0.2/docs/integrations/providers/arcee/"}, "FlashrankRerank": {"OR (depending on Python version)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/flashrank-reranker/", "1. 
Vanilla RAG {#vanilla-rag-1}": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/"}, "ChaindeskRetriever": {"chaindesk.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/chaindesk/", "Chaindesk": "https://python.langchain.com/v0.2/docs/integrations/providers/chaindesk/"}, "MergerRetriever": {"Get 3 diff embeddings.": "https://python.langchain.com/v0.2/docs/integrations/retrievers/merger_retriever/"}, "EmbeddingsClusteringFilter": {"Get 3 diff embeddings.": "https://python.langchain.com/v0.2/docs/integrations/retrievers/merger_retriever/"}, "EmbeddingsRedundantFilter": {"Get 3 diff embeddings.": "https://python.langchain.com/v0.2/docs/integrations/retrievers/merger_retriever/", "Helper function for printing docs": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/contextual_compression/"}, "LongContextReorder": {"Get 3 diff embeddings.": "https://python.langchain.com/v0.2/docs/integrations/retrievers/merger_retriever/", "Get embeddings.": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/long_context_reorder/"}, "TFIDFRetriever": {"tf_idf.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/tf_idf/"}, "GoogleVertexAIMultiTurnSearchRetriever": {"google_vertex_ai_search.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/google_vertex_ai_search/"}, "GoogleVertexAISearchRetriever": {"google_vertex_ai_search.md": "https://python.langchain.com/v0.2/docs/integrations/retrievers/google_vertex_ai_search/", "Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/"}, "Milvus": {"This example only specifies a relevant query": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/milvus_self_query/", "Milvus": "https://python.langchain.com/v0.2/docs/integrations/providers/milvus/", "Zilliz": "https://python.langchain.com/v0.2/docs/integrations/providers/zilliz/", "replace": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/zilliz/", "This will only get documents for Ankush": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/milvus/"}, "AttributeInfo": {"This example only specifies a relevant query": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/mongodb_atlas/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/vectara_self_query/", "create DashVector collection": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/dashvector/", "or install latest:": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/dingo/", "Get openAI api key by reading local .env file": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/timescalevector_self_query/", "create new index": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/pinecone/", "in case if some queries fail consider installing libdeeplake manually": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/activeloop_deeplake_self_query/", "import os": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/qdrant_self_query/", "Pip install necessary packages": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/timescalevector/", "You need the dgml-utils package to use the DocugamiLoader (run pip install directly without \"poetry run\" if you are not using poetry)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/docugami/", 
"self-query-qdrant": "https://python.langchain.com/v0.2/docs/templates/self-query-qdrant/", "This example only specifies a filter": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/self_query/", "Self-querying": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/.ipynb_checkpoints/self_query-checkpoint/"}, "SelfQueryRetriever": {"This example only specifies a relevant query": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/mongodb_atlas/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/vectara_self_query/", "create DashVector collection": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/dashvector/", "or install latest:": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/dingo/", "Get openAI api key by reading local .env file": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/timescalevector_self_query/", "create new index": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/pinecone/", "in case if some queries fail consider installing libdeeplake manually": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/activeloop_deeplake_self_query/", "import os": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/qdrant_self_query/", "Astra DB": "https://python.langchain.com/v0.2/docs/integrations/providers/astradb/", "Chroma": "https://python.langchain.com/v0.2/docs/integrations/providers/.ipynb_checkpoints/chroma-checkpoint/", "Pip install necessary packages": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/timescalevector/", "You need the dgml-utils package to use the DocugamiLoader (run pip install directly without \"poetry run\" if you are not using poetry)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/docugami/", "This example only specifies a filter": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/self_query/", "Self-querying": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/.ipynb_checkpoints/self_query-checkpoint/"}, "OpenAI": {"This example only specifies a relevant query": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/mongodb_atlas/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_chat/", "or install latest:": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/dingo/", "Get openAI api key by reading local .env file": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/timescalevector_self_query/", "create new index": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/pinecone/", "in case if some queries fail consider installing libdeeplake manually": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/activeloop_deeplake_self_query/", "import os": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/qdrant_self_query/", "OpenAI": "https://python.langchain.com/v0.2/docs/integrations/platforms/openai/", "use it with langchain {#use-it-with-langchain}": "https://python.langchain.com/v0.2/docs/integrations/tools/google_jobs/", "google_serper.md": "https://python.langchain.com/v0.2/docs/integrations/tools/google_serper/", "Answer with 'Zhu'": "https://python.langchain.com/v0.2/docs/integrations/tools/human_tools/", "google_finance.md": 
"https://python.langchain.com/v0.2/docs/integrations/tools/google_finance/", "awslambda.md": "https://python.langchain.com/v0.2/docs/integrations/tools/awslambda/", "folder_id='1yucgL9WGgWZdM1TOuKkeghlPizuzMYb5'": "https://python.langchain.com/v0.2/docs/integrations/tools/google_drive/", "openweathermap.md": "https://python.langchain.com/v0.2/docs/integrations/tools/openweathermap/", "search_tools.md": "https://python.langchain.com/v0.2/docs/integrations/tools/search_tools/", "eleven_labs_tts.md": "https://python.langchain.com/v0.2/docs/integrations/tools/eleven_labs_tts/", "get from https://platform.openai.com/": "https://python.langchain.com/v0.2/docs/integrations/tools/zapier/", "send data into the chain": "https://python.langchain.com/v0.2/docs/integrations/tools/nvidia_riva/", "lemonai.md": "https://python.langchain.com/v0.2/docs/integrations/tools/lemonai/", "graphql.md": "https://python.langchain.com/v0.2/docs/integrations/tools/graphql/", "searchapi.md": "https://python.langchain.com/v0.2/docs/integrations/tools/searchapi/", "gradio_tools.md": "https://python.langchain.com/v0.2/docs/integrations/tools/gradio_tools/", "sceneXplain.md": "https://python.langchain.com/v0.2/docs/integrations/tools/sceneXplain/", "Needed if you would like to display images in the notebook": "https://python.langchain.com/v0.2/docs/integrations/tools/dalle_image_generator/", "Based on ReAct Agent": "https://python.langchain.com/v0.2/docs/integrations/tools/ionic_shopping/", "Set this to your Zep server URL": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory/", "loads previous state from Mot\u00f6rhead \ud83e\udd18": "https://python.langchain.com/v0.2/docs/integrations/memory/motorhead_memory/", "Here we want to make sure the answer is minimally relevant": "https://python.langchain.com/v0.2/docs/integrations/callbacks/confident/", "LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/callbacks/llmonitor/", "Fiddler project and model names, used for model registration": "https://python.langchain.com/v0.2/docs/integrations/callbacks/fiddler/", "LLM Hyperparameters": "https://python.langchain.com/v0.2/docs/integrations/callbacks/sagemaker_tracking/", "labelstudio.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/labelstudio/", "Connect to Comet if no API Key is set": "https://python.langchain.com/v0.2/docs/integrations/callbacks/comet_tracing/", "argilla.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/argilla/", "promptlayer.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/promptlayer/", "Streamlit": "https://python.langchain.com/v0.2/docs/integrations/callbacks/.ipynb_checkpoints/streamlit-checkpoint/", "trubrics.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/trubrics/", "Install necessary dependencies.": "https://python.langchain.com/v0.2/docs/integrations/callbacks/infino/", "From LangChain, import standard modules for prompting.": "https://python.langchain.com/v0.2/docs/integrations/providers/dspy/", "SearchApi": "https://python.langchain.com/v0.2/docs/integrations/providers/searchapi/", "os.environ[\"OPENAI_ORGANIZATION\"] = \"...\"": "https://python.langchain.com/v0.2/docs/integrations/providers/comet_tracking/", "scenario 1 - LLM": "https://python.langchain.com/v0.2/docs/integrations/providers/aim_tracking/", "os.environ[\"OPENAI_API_KEY\"] = \"\"": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracking/", "Log10": 
"https://python.langchain.com/v0.2/docs/integrations/providers/log10/", "LangChain Decorators \u2728": "https://python.langchain.com/v0.2/docs/integrations/providers/langchain_decorators/", "!pip3 install rebuff openai -U": "https://python.langchain.com/v0.2/docs/integrations/providers/rebuff/", "SCENARIO 1 - LLM": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_tracking/", "Serper - Google Search API": "https://python.langchain.com/v0.2/docs/integrations/providers/google_serper/", "Helicone": "https://python.langchain.com/v0.2/docs/integrations/providers/helicone/", "Shale Protocol": "https://python.langchain.com/v0.2/docs/integrations/providers/shaleprotocol/", "you don't need to call close to write profiles to WhyLabs, upload will occur periodically, but to demo let's not wait.": "https://python.langchain.com/v0.2/docs/integrations/providers/whylabs_profiling/", "wandb documentation to configure wandb using env variables": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracing/", "Setup and use the ClearML Callback": "https://python.langchain.com/v0.2/docs/integrations/providers/clearml_tracking/", "0: Import ray serve and request from starlette": "https://python.langchain.com/v0.2/docs/integrations/providers/ray_serve/", "Create a dataframe": "https://python.langchain.com/v0.2/docs/integrations/toolkits/csv/", "xorbits.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/xorbits/", "jira.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/jira/", "in apache-spark root directory. (tested here with \"spark-3.4.0-bin-hadoop3 and later\")": "https://python.langchain.com/v0.2/docs/integrations/toolkits/spark/", "For Windows/Linux": "https://python.langchain.com/v0.2/docs/integrations/toolkits/azure_cognitive_services/", "Select the LLM to use. Here, we use gpt-3.5-turbo-instruct": "https://python.langchain.com/v0.2/docs/integrations/toolkits/openapi_nla/", "steam.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/steam/", "json.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/json/", "Copilot Sandbox": "https://python.langchain.com/v0.2/docs/integrations/toolkits/clickup/", "Set environmental variables here": "https://python.langchain.com/v0.2/docs/integrations/toolkits/office365/", "pandas.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/pandas/", "nasa.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/nasa/", "azure_ai_services.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/azure_ai_services/", "NOTE: In this example. 
We must set `allow_dangerous_request=True` to enable the OpenAPI Agent to automatically use the Request Tool.": "https://python.langchain.com/v0.2/docs/integrations/toolkits/openapi/", "Set your environment variables using os.environ": "https://python.langchain.com/v0.2/docs/integrations/toolkits/gitlab/", "load text splitter and split docs into snippets of text": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/starrocks/", "install package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/weaviate/", "cosine: distance metric": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/jaguar/", "initialize marqo": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/marqo/", "Pip install necessary packages": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/timescalevector/", "DocumentDB connection string": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/documentdb/", "initialize MongoDB python client": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/mongodb_atlas/", "OR (depending on Python version)": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/voyageai-reranker/", "Uncomment this to install psychicapi if you don't already have it installed": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/psychic/", "You need the dgml-utils package to use the DocugamiLoader (run pip install directly without \"poetry run\" if you are not using poetry)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/docugami/", "You can store your OPENAI_API_KEY in a .env file as well": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/amazon_textract/", "networkx.md": "https://python.langchain.com/v0.2/docs/integrations/graphs/networkx/", "get a token: https://platform.openai.com/account/api-keys": "https://python.langchain.com/v0.2/docs/integrations/llms/openai/", "install the opaqueprompts and langchain packages": "https://python.langchain.com/v0.2/docs/integrations/llms/opaqueprompts/", "To make the caching really obvious, lets use a slower model.": "https://python.langchain.com/v0.2/docs/modules/model_io/llms/llm_caching/", "Layerup Security": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/layerup_security/", "Quickstart": "https://python.langchain.com/v0.2/docs/modules/model_io/.ipynb_checkpoints/quick_start-checkpoint/", "Note that we set max_retries = 0 to avoid retrying on RateLimits, etc": "https://python.langchain.com/v0.2/docs/guides/productionization/fallbacks/", "Using https://huggingface.co/laiyer/deberta-v3-base-prompt-injection": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/hugging_face_prompt_injection/", "Logical Fallacy chain": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/logical_fallacy_chain/", "Constitutional chain": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/constitutional_chain/", "moderation.md": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/moderation/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/data_generation/", "Get embeddings.": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/long_context_reorder/", "Helper function for printing docs": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/contextual_compression/", "Retrievers": 
"https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/.ipynb_checkpoints/index-checkpoint/", "Self-querying": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/.ipynb_checkpoints/self_query-checkpoint/", "agent_with_memory_in_db.md": "https://python.langchain.com/v0.2/docs/modules/memory/agent_with_memory_in_db/", "[Beta] Memory": "https://python.langchain.com/v0.2/docs/modules/memory/.ipynb_checkpoints/index-checkpoint/", "!python -m spacy download en_core_web_lg": "https://python.langchain.com/v0.2/docs/modules/memory/custom_memory/", "adding_memory_chain_multiple_inputs.md": "https://python.langchain.com/v0.2/docs/modules/memory/adding_memory_chain_multiple_inputs/", "adding_memory.md": "https://python.langchain.com/v0.2/docs/modules/memory/adding_memory/", "Combined": "https://python.langchain.com/v0.2/docs/modules/memory/multiple_memory/", "Here it is by default set to \"AI\"": "https://python.langchain.com/v0.2/docs/modules/memory/conversational_customization/", "agent_with_memory.md": "https://python.langchain.com/v0.2/docs/modules/memory/agent_with_memory/", "kg.md": "https://python.langchain.com/v0.2/docs/modules/memory/types/kg/", "We can see here that the buffer is updated": "https://python.langchain.com/v0.2/docs/modules/memory/types/token_buffer/", "Entity": "https://python.langchain.com/v0.2/docs/modules/memory/types/entity_summary_memory/", "Conversation Summary": "https://python.langchain.com/v0.2/docs/modules/memory/types/summary/", "Backed by a Vector Store": "https://python.langchain.com/v0.2/docs/modules/memory/types/vectorstore_retriever_memory/", "Conversation Buffer Window": "https://python.langchain.com/v0.2/docs/modules/memory/types/buffer_window/", "Conversation Buffer": "https://python.langchain.com/v0.2/docs/modules/memory/types/buffer/", "We can see here that there is a summary of the conversation and then some previous interactions": "https://python.langchain.com/v0.2/docs/modules/memory/types/summary_buffer/", "Callbacks": "https://python.langchain.com/v0.2/docs/modules/callbacks/index/", "First, define custom callback handler implementations": "https://python.langchain.com/v0.2/docs/modules/callbacks/multiple_callbacks/", "You can kick off concurrent runs from within the context manager": "https://python.langchain.com/v0.2/docs/modules/callbacks/token_counting/", "this chain will both print to stdout (because verbose=True) and write to 'output.log'": "https://python.langchain.com/v0.2/docs/modules/callbacks/filecallbackhandler/", "Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/handle_parsing_errors/", "Model I/O": "https://python.langchain.com/v0.2/docs/modules/model_io/index/", "Define your desired data structure.": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/quick_start/", "retry.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/retry/", "datetime.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/datetime/", "token_usage_tracking.md": "https://python.langchain.com/v0.2/docs/modules/model_io/llms/token_usage_tracking/", "streaming_llm.md": "https://python.langchain.com/v0.2/docs/modules/model_io/llms/streaming_llm/", "Quick Start {#quick-start}": "https://python.langchain.com/v0.2/docs/modules/model_io/llms/quick_start/", "batch_configurable_chain([\"ice cream\", \"spaghetti\", \"dumplings\"])": 
"https://python.langchain.com/v0.2/docs/expression_language/why/", "> ChatPromptValue(messages=[HumanMessage(content='tell me a short joke about ice cream')])": "https://python.langchain.com/v0.2/docs/expression_language/get_started/"}, "PGVector": {"This example only specifies a relevant query": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/pgvector_self_query/", "PGVector": "https://python.langchain.com/v0.2/docs/integrations/providers/pgvector/", "See docker command above to launch a postgres instance with pgvector enabled.": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pgvector/"}, "Weaviate": {"This example only specifies a relevant query": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/weaviate_self_query/"}, "Vectara": {"Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vectara/", "Vectara": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/index/"}, "DashVector": {"create DashVector collection": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/dashvector/", "DashVector": "https://python.langchain.com/v0.2/docs/integrations/providers/dashvector/", "add texts": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/dashvector/"}, "Tongyi": {"create DashVector collection": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/dashvector/", "Alibaba Cloud": "https://python.langchain.com/v0.2/docs/integrations/providers/alibaba_cloud/", "Install the package": "https://python.langchain.com/v0.2/docs/integrations/llms/tongyi/"}, "DatabricksVectorSearch": {"This example only specifies a relevant query": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/databricks_vector_search/", "databricks_vector_search.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/databricks_vector_search/"}, "Dingo": {"or install latest:": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/dingo/", "DingoDB": "https://python.langchain.com/v0.2/docs/integrations/providers/dingo/"}, "OpenSearchVectorSearch": {"This example only specifies a relevant query": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/opensearch_self_query/", "AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "OpenSearch": "https://python.langchain.com/v0.2/docs/integrations/providers/opensearch/", "If using the default Docker installation, use this instantiation instead:": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/opensearch/"}, "ElasticsearchStore": {"This example only specifies a relevant query": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/elasticsearch_self_query/", "Elasticsearch": "https://python.langchain.com/v0.2/docs/integrations/providers/elasticsearch/", "Metadata {#metadata}": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/elasticsearch/", "indexing.md": "https://python.langchain.com/v0.2/docs/modules/data_connection/indexing/"}, "ConnectionParams": {"This example only specifies a relevant query": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/tencentvectordb/", "from langchain_community.embeddings.openai import OpenAIEmbeddings": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tencentvectordb/", "baiduvectordb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/baiduvectordb/"}, "MetaField": {"This 
example only specifies a relevant query": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/tencentvectordb/", "from langchain_community.embeddings.openai import OpenAIEmbeddings": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tencentvectordb/"}, "TencentVectorDB": {"This example only specifies a relevant query": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/tencentvectordb/", "Tencent": "https://python.langchain.com/v0.2/docs/integrations/providers/tencent/", "from langchain_community.embeddings.openai import OpenAIEmbeddings": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tencentvectordb/"}, "TimescaleVector": {"Get openAI api key by reading local .env file": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/timescalevector_self_query/", "Pip install necessary packages": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/timescalevector/"}, "AstraDB": {"This example only specifies a relevant query": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/astradb/"}, "SupabaseVectorStore": {"This example only specifies a relevant query": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/supabase_self_query/", "Supabase (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/providers/supabase/", "with pip": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/supabase/"}, "Redis": {"This example only specifies a relevant query": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/redis_self_query/", "Redis": "https://python.langchain.com/v0.2/docs/integrations/providers/redis/", "connection to redis standalone at localhost, db 0, no password": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/redis/"}, "MyScale": {"This example only specifies a relevant query": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/myscale_self_query/", "MyScale": "https://python.langchain.com/v0.2/docs/integrations/providers/myscale/", "use directly a `where_str` to delete": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/myscale/"}, "MongoDBAtlasVectorSearch": {"This example only specifies a relevant query": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/mongodb_atlas/", "initialize MongoDB python client": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/mongodb_atlas/"}, "Qdrant": {"import os": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/qdrant_self_query/", "Qdrant": "https://python.langchain.com/v0.2/docs/integrations/providers/qdrant/", "qdrant.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/qdrant/", "Vector stores": "https://python.langchain.com/v0.2/docs/modules/data_connection/vectorstores/index/"}, "AzureMLOnlineEndpoint": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "azure_ml.md": "https://python.langchain.com/v0.2/docs/integrations/llms/azure_ml/"}, "AzureOpenAI": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/.ipynb_checkpoints/microsoft-checkpoint/", "OpenAI": "https://python.langchain.com/v0.2/docs/integrations/platforms/openai/", "The API version you want to use: set this to `2023-12-01-preview` for the released version.": "https://python.langchain.com/v0.2/docs/integrations/llms/azure_openai/"}, "AzureChatOpenAI": {"Microsoft": 
"https://python.langchain.com/v0.2/docs/integrations/platforms/.ipynb_checkpoints/microsoft-checkpoint/", "OpenAI": "https://python.langchain.com/v0.2/docs/integrations/platforms/openai/", "azure_chat_openai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/azure_chat_openai/", "The underlying python-gremlin has a problem when running in notebook": "https://python.langchain.com/v0.2/docs/integrations/graphs/azure_cosmosdb_gremlin/"}, "AzureAIDataLoader": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "Create a connection to your project": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/azure_ai_data/"}, "AzureAIDocumentIntelligenceLoader": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "microsoft_word.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/microsoft_word/", "microsoft_excel.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/microsoft_excel/", "microsoft_powerpoint.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/microsoft_powerpoint/", "azure_document_intelligence.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/azure_document_intelligence/", "Microsoft Office": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/office_file/", "PDF": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/pdf/", "HTML": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/html/"}, "AzureBlobStorageContainerLoader": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/.ipynb_checkpoints/microsoft-checkpoint/", "azure_blob_storage_container.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/azure_blob_storage_container/"}, "AzureBlobStorageFileLoader": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/.ipynb_checkpoints/microsoft-checkpoint/", "azure_blob_storage_file.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/azure_blob_storage_file/"}, "OneDriveLoader": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/.ipynb_checkpoints/microsoft-checkpoint/", "microsoft_onedrive.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/microsoft_onedrive/"}, "UnstructuredWordDocumentLoader": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/.ipynb_checkpoints/microsoft-checkpoint/", "Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "microsoft_word.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/microsoft_word/"}, "UnstructuredExcelLoader": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "microsoft_excel.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/microsoft_excel/"}, "SharePointLoader": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "loads documents from root directory": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/microsoft_sharepoint/"}, "UnstructuredPowerPointLoader": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "Unstructured": 
"https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "microsoft_powerpoint.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/microsoft_powerpoint/"}, "OneNoteLoader": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "microsoft_onenote.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/microsoft_onenote/"}, "AzureCosmosDBVectorSearch": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/.ipynb_checkpoints/microsoft-checkpoint/", "Set up the OpenAI Environment Variables": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azure_cosmos_db/"}, "O365Toolkit": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/.ipynb_checkpoints/microsoft-checkpoint/", "Set environmental variables here": "https://python.langchain.com/v0.2/docs/integrations/toolkits/office365/"}, "PowerBIToolkit": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/.ipynb_checkpoints/microsoft-checkpoint/", "fictional example": "https://python.langchain.com/v0.2/docs/integrations/toolkits/powerbi/"}, "PowerBIDataset": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/.ipynb_checkpoints/microsoft-checkpoint/", "fictional example": "https://python.langchain.com/v0.2/docs/integrations/toolkits/powerbi/"}, "BingSearchAPIWrapper": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/.ipynb_checkpoints/microsoft-checkpoint/", "bing_search.md": "https://python.langchain.com/v0.2/docs/integrations/tools/bing_search/"}, "PresidioAnonymizer": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "Download model": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/index/"}, "PresidioReversibleAnonymizer": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "QA with private data protection {#qa-with-private-data-protection}": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/qa_privacy_protection/", "Multi-language data anonymization with Microsoft Presidio {#multi-language-data-anonymization-with-microsoft-presidio}": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/multi_language/", "Reversible data anonymization with Microsoft Presidio {#reversible-data-anonymization-with-microsoft-presidio}": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/reversible/", "Download model": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/index/"}, "AmazonAPIGateway": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "These are sample parameters for Falcon 40B Instruct Deployed from Amazon SageMaker JumpStart": "https://python.langchain.com/v0.2/docs/integrations/llms/amazon_api_gateway/"}, "ContentHandlerBase": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/"}, "S3DirectoryLoader": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "aws_s3_directory.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/aws_s3_directory/"}, "S3FileLoader": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "aws_s3_file.md": 
"https://python.langchain.com/v0.2/docs/integrations/document_loaders/aws_s3_file/"}, "AmazonTextractPDFLoader": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "You can store your OPENAI_API_KEY in a .env file as well": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/amazon_textract/", "PDF": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/pdf/"}, "AthenaLoader": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "athena.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/athena/"}, "DocumentDBVectorSearch": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/"}, "DynamoDBChatMessageHistory": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/integrations/memory/aws_dynamodb/"}, "SageMakerCallbackHandler": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "LLM Hyperparameters": "https://python.langchain.com/v0.2/docs/integrations/callbacks/sagemaker_tracking/"}, "AmazonComprehendModerationChain": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "Define callback handlers by subclassing BaseModerationCallbackHandler": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/amazon_comprehend_chain/"}, "ChatHuggingFace": {"Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/platforms/huggingface/", "setup tools": "https://python.langchain.com/v0.2/docs/integrations/chat/huggingface/"}, "HuggingFacePipeline": {"Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/platforms/huggingface/", "huggingface_pipelines.md": "https://python.langchain.com/v0.2/docs/integrations/llms/huggingface_pipelines/", "lmformatenforcer_experimental.md": "https://python.langchain.com/v0.2/docs/integrations/llms/lmformatenforcer_experimental/", "We'll choose a regex that matches to a structured json string that looks like:": "https://python.langchain.com/v0.2/docs/integrations/llms/rellm_experimental/", "mlx_pipelines.md": "https://python.langchain.com/v0.2/docs/integrations/llms/mlx_pipelines/", "jsonformer_experimental.md": "https://python.langchain.com/v0.2/docs/integrations/llms/jsonformer_experimental/", "openvino.md": "https://python.langchain.com/v0.2/docs/integrations/llms/openvino/", "weight_only_quantization.md": "https://python.langchain.com/v0.2/docs/integrations/llms/weight_only_quantization/"}, "HuggingFaceDatasetLoader": {"Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/platforms/huggingface/", "hugging_face_dataset.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/hugging_face_dataset/"}, "load_huggingface_tool": {"Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/platforms/huggingface/", "Requires transformers>=4.29.0 and huggingface_hub>=0.14.1": "https://python.langchain.com/v0.2/docs/integrations/tools/huggingface_tools/"}, "ChatGPTLoader": {"OpenAI": "https://python.langchain.com/v0.2/docs/integrations/platforms/openai/", "chatgpt_loader.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/chatgpt_loader/"}, "DallEAPIWrapper": {"OpenAI": "https://python.langchain.com/v0.2/docs/integrations/platforms/openai/", "Needed if you would like to display images in the notebook": 
"https://python.langchain.com/v0.2/docs/integrations/tools/dalle_image_generator/"}, "OpenAIModerationChain": {"OpenAI": "https://python.langchain.com/v0.2/docs/integrations/platforms/openai/", "moderation.md": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/moderation/"}, "GoogleGenerativeAI": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "google_ai.md": "https://python.langchain.com/v0.2/docs/integrations/llms/google_ai/"}, "VertexAIModelGarden": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "google_vertex_ai_palm.md": "https://python.langchain.com/v0.2/docs/integrations/llms/google_vertex_ai_palm/"}, "ChatGoogleGenerativeAI": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "Note that each chunk may contain more than one \"token\"": "https://python.langchain.com/v0.2/docs/integrations/chat/google_generative_ai/"}, "ChatVertexAI": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "# Automatically restart kernel after installs so that your environment can access the new packages": "https://python.langchain.com/v0.2/docs/integrations/memory/google_sql_mssql/", "for running these examples in the notebook:": "https://python.langchain.com/v0.2/docs/integrations/chat/google_vertex_ai_palm/", "google_vertex_ai_palm.md": "https://python.langchain.com/v0.2/docs/integrations/llms/google_vertex_ai_palm/", "structured_output.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/structured_output/", "response_metadata.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/response_metadata/"}, "BigQueryLoader": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/.ipynb_checkpoints/google-checkpoint/", "Note that the `id` column is being returned twice, with one instance aliased as `source`": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_bigquery/"}, "GCSDirectoryLoader": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/.ipynb_checkpoints/google-checkpoint/", "google_cloud_storage_directory.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_cloud_storage_directory/"}, "GCSFileLoader": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/.ipynb_checkpoints/google-checkpoint/", "google_cloud_storage_file.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_cloud_storage_file/"}, "GoogleDriveLoader": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/.ipynb_checkpoints/google-checkpoint/", "folder_id='1yucgL9WGgWZdM1TOuKkeghlPizuzMYb5'": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_drive/"}, "GoogleSpeechToTextLoader": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "or a local file path: file_path = \"./audio.wav\"": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_speech_to_text/"}, "Blob": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "google_docai.md": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/google_docai/", "Configure the parsers that you want to use per mime-type!": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/handle_files/", "Custom Document Loader {#custom-document-loader}": 
"https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/custom/"}, "DocAIParser": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "google_docai.md": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/google_docai/"}, "GoogleTranslateTransformer": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "google_translate.md": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/google_translate/"}, "BigQueryVectorSearch": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "# Automatically restart kernel after installs so that your environment can access the new packages": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_bigquery_vector_search/"}, "VectorSearchVectorStore": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "TODO : Set values as per your requirements": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_vertex_ai_vector_search/"}, "ScaNN": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "scann.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/scann/"}, "GoogleDocumentAIWarehouseRetriever": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/"}, "GoogleCloudTextToSpeechTool": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "google_cloud_texttospeech.md": "https://python.langchain.com/v0.2/docs/integrations/tools/google_cloud_texttospeech/"}, "GoogleFinanceQueryRun": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "google_finance.md": "https://python.langchain.com/v0.2/docs/integrations/tools/google_finance/"}, "GoogleFinanceAPIWrapper": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "google_finance.md": "https://python.langchain.com/v0.2/docs/integrations/tools/google_finance/"}, "GoogleJobsQueryRun": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "use it with langchain {#use-it-with-langchain}": "https://python.langchain.com/v0.2/docs/integrations/tools/google_jobs/"}, "GoogleLensQueryRun": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "Runs google lens on an image of Danny Devito": "https://python.langchain.com/v0.2/docs/integrations/tools/google_lens/"}, "GoogleLensAPIWrapper": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "Runs google lens on an image of Danny Devito": "https://python.langchain.com/v0.2/docs/integrations/tools/google_lens/"}, "GooglePlacesTool": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "google_places.md": "https://python.langchain.com/v0.2/docs/integrations/tools/google_places/"}, "GoogleScholarQueryRun": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "google_scholar.md": "https://python.langchain.com/v0.2/docs/integrations/tools/google_scholar/"}, "GoogleScholarAPIWrapper": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "google_scholar.md": "https://python.langchain.com/v0.2/docs/integrations/tools/google_scholar/"}, "GoogleSearchAPIWrapper": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/.ipynb_checkpoints/google-checkpoint/", "google_search.md": 
"https://python.langchain.com/v0.2/docs/integrations/tools/google_search/", "System parameter in NIBittensorLLM is optional but you can set whatever you want to perform with model": "https://python.langchain.com/v0.2/docs/integrations/llms/bittensor/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/web_scraping/", "agent_with_memory.md": "https://python.langchain.com/v0.2/docs/modules/memory/agent_with_memory/"}, "GoogleTrendsQueryRun": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "google_trends.md": "https://python.langchain.com/v0.2/docs/integrations/tools/google_trends/"}, "GoogleTrendsAPIWrapper": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "google_trends.md": "https://python.langchain.com/v0.2/docs/integrations/tools/google_trends/"}, "GmailToolkit": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/integrations/toolkits/gmail/"}, "SearchApiAPIWrapper": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "searchapi.md": "https://python.langchain.com/v0.2/docs/integrations/tools/searchapi/", "SearchApi": "https://python.langchain.com/v0.2/docs/integrations/providers/searchapi/"}, "SerpAPIWrapper": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "You can create the tool to pass to an agent": "https://python.langchain.com/v0.2/docs/integrations/tools/serpapi/", "setup tools": "https://python.langchain.com/v0.2/docs/integrations/chat/mlx/", "SerpAPI": "https://python.langchain.com/v0.2/docs/integrations/providers/serpapi/", "Initialize the language model": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/examples/comparisons/", "agent_with_memory_in_db.md": "https://python.langchain.com/v0.2/docs/modules/memory/agent_with_memory_in_db/"}, "GoogleSerperAPIWrapper": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "google_serper.md": "https://python.langchain.com/v0.2/docs/integrations/tools/google_serper/", "Serper - Google Search API": "https://python.langchain.com/v0.2/docs/integrations/providers/google_serper/"}, "YouTubeSearchTool": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "youtube.md": "https://python.langchain.com/v0.2/docs/integrations/tools/youtube/"}, "YoutubeAudioLoader": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "set a flag to switch between local and remote parsing": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/youtube_audio/"}, "OpenAIWhisperParser": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "set a flag to switch between local and remote parsing": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/youtube_audio/"}, "YoutubeLoader": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "YouTube": "https://python.langchain.com/v0.2/docs/integrations/providers/youtube/", "Init the GoogleApiClient": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/youtube_transcript/", "%pip install -qU langchain langchain-community langchain-openai youtube-transcript-api pytube langchain-chroma": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/quickstart/", "%pip install -qU 
langchain langchain-openai youtube-transcript-api pytube": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/techniques/structuring/"}, "AnthropicLLM": {"Anthropic": "https://python.langchain.com/v0.2/docs/integrations/platforms/.ipynb_checkpoints/anthropic-checkpoint/", "anthropic.md": "https://python.langchain.com/v0.2/docs/integrations/llms/anthropic/"}, "MatchingEngine": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/.ipynb_checkpoints/google-checkpoint/"}, "AzureCognitiveSearchRetriever": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/.ipynb_checkpoints/microsoft-checkpoint/"}, "AIPluginTool": {"chatgpt_plugins.md": "https://python.langchain.com/v0.2/docs/integrations/tools/chatgpt_plugins/"}, "DataForSeoAPIWrapper": {"dataforseo.md": "https://python.langchain.com/v0.2/docs/integrations/tools/dataforseo/", "DataForSEO": "https://python.langchain.com/v0.2/docs/integrations/providers/dataforseo/"}, "Tool": {"dataforseo.md": "https://python.langchain.com/v0.2/docs/integrations/tools/dataforseo/", "You can create the tool to pass to an agent": "https://python.langchain.com/v0.2/docs/integrations/tools/serpapi/", "google_serper.md": "https://python.langchain.com/v0.2/docs/integrations/tools/google_serper/", "searchapi.md": "https://python.langchain.com/v0.2/docs/integrations/tools/searchapi/", "google_search.md": "https://python.langchain.com/v0.2/docs/integrations/tools/google_search/", "Adapted code from /docs/modules/agents/how_to/sharedmemory_for_tools": "https://python.langchain.com/v0.2/docs/integrations/tools/reddit_search/", "Based on ReAct Agent": "https://python.langchain.com/v0.2/docs/integrations/tools/ionic_shopping/", "Set this to your Zep server URL": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory/", "SearchApi": "https://python.langchain.com/v0.2/docs/integrations/providers/searchapi/", "Serper - Google Search API": "https://python.langchain.com/v0.2/docs/integrations/providers/google_serper/", "document_comparison_toolkit.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/document_comparison_toolkit/", "Set your environment variables using os.environ": "https://python.langchain.com/v0.2/docs/integrations/toolkits/github/", "System parameter in NIBittensorLLM is optional but you can set whatever you want to perform with model": "https://python.langchain.com/v0.2/docs/integrations/llms/bittensor/", "Pydantic compatibility": "https://python.langchain.com/v0.2/docs/guides/development/pydantic_compatibility/", "Initialize the language model": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/examples/comparisons/", "agent_with_memory_in_db.md": "https://python.langchain.com/v0.2/docs/modules/memory/agent_with_memory_in_db/", "agent_with_memory.md": "https://python.langchain.com/v0.2/docs/modules/memory/agent_with_memory/", "need to use GPT-4 here as GPT-3.5 does not understand, however hard you insist, that": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/agent_iter/"}, "ConneryService": {"Specify your Connery Runner credentials.": "https://python.langchain.com/v0.2/docs/integrations/toolkits/connery/"}, "DataheraldAPIWrapper": {"dataherald.md": "https://python.langchain.com/v0.2/docs/integrations/tools/dataherald/", "Dataherald": "https://python.langchain.com/v0.2/docs/integrations/providers/.ipynb_checkpoints/dataherald-checkpoint/"}, "SearxSearchWrapper": {"searx_search.md": 
"https://python.langchain.com/v0.2/docs/integrations/tools/searx_search/", "SearxNG Search API": "https://python.langchain.com/v0.2/docs/integrations/providers/searx/"}, "PythonREPL": {"You can create the tool to pass to an agent": "https://python.langchain.com/v0.2/docs/integrations/tools/python/", "code_writing.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/code_writing/"}, "GoogleJobsAPIWrapper": {"use it with langchain {#use-it-with-langchain}": "https://python.langchain.com/v0.2/docs/integrations/tools/google_jobs/"}, "InfobipAPIWrapper": {"How to use it inside an Agent {#how-to-use-it-inside-an-agent}": "https://python.langchain.com/v0.2/docs/integrations/tools/infobip/"}, "StructuredTool": {"How to use it inside an Agent {#how-to-use-it-inside-an-agent}": "https://python.langchain.com/v0.2/docs/integrations/tools/infobip/", "Import things that are needed generically": "https://python.langchain.com/v0.2/docs/modules/tools/custom_tools/"}, "E2BDataAnalysisTool": {"Artifacts are charts created by matplotlib when `plt.show()` is called": "https://python.langchain.com/v0.2/docs/integrations/tools/e2b_data_analysis/", "openai_assistants.md": "https://python.langchain.com/v0.2/docs/modules/agents/agent_types/openai_assistants/"}, "SQLDatabase": {"In order to build a selectable on SA's Core API, you need a table definition.": "https://python.langchain.com/v0.2/docs/integrations/tools/sql_database/", "CnosDB": "https://python.langchain.com/v0.2/docs/integrations/providers/cnosdb/", "!pip3 install rebuff openai -U": "https://python.langchain.com/v0.2/docs/integrations/providers/rebuff/", "sql_database.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/sql_database/", "Using LangSmith is recommended but not required. Uncomment below lines to use.": "https://python.langchain.com/v0.2/docs/use_cases/sql/csv/", "Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/sql/prompting/", "os.environ[\"OPENAI_API_KEY\"] = getpass.getpass()": "https://python.langchain.com/v0.2/docs/use_cases/sql/large_db/"}, "HumanInputRun": {"Answer with 'Zhu'": "https://python.langchain.com/v0.2/docs/integrations/tools/human_tools/"}, "NucliaUnderstandingAPI": {"nuclia.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/nuclia/", "Nuclia": "https://python.langchain.com/v0.2/docs/integrations/providers/nuclia/", "nuclia_transformer.md": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/nuclia_transformer/"}, "YahooFinanceNewsTool": {"How YahooFinanceNewsTool works? 
{#how-yahoofinancenewstool-works}": "https://python.langchain.com/v0.2/docs/integrations/tools/yahoo_finance_news/"}, "WikidataAPIWrapper": {"wikidata.md": "https://python.langchain.com/v0.2/docs/integrations/tools/wikidata/"}, "WikidataQueryRun": {"wikidata.md": "https://python.langchain.com/v0.2/docs/integrations/tools/wikidata/"}, "TwilioAPIWrapper": {"twilio.md": "https://python.langchain.com/v0.2/docs/integrations/tools/twilio/"}, "IFTTTWebhook": {"ifttt.md": "https://python.langchain.com/v0.2/docs/integrations/tools/ifttt/"}, "SemanticScholarQueryRun": {"start by installing semanticscholar api": "https://python.langchain.com/v0.2/docs/integrations/tools/semanticscholar/"}, "WikipediaQueryRun": {"wikipedia.md": "https://python.langchain.com/v0.2/docs/integrations/tools/wikipedia/", "index.md": "https://python.langchain.com/v0.2/docs/modules/tools/index/", "pip install wikipedia": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/intermediate_steps/", "Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/max_iterations/", "prompt_size.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/prompt_size/"}, "WikipediaAPIWrapper": {"wikipedia.md": "https://python.langchain.com/v0.2/docs/integrations/tools/wikipedia/", "Set this to your Zep server URL": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory/", "index.md": "https://python.langchain.com/v0.2/docs/modules/tools/index/", "pip install wikipedia": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/intermediate_steps/", "Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/max_iterations/", "prompt_size.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/prompt_size/"}, "AlphaVantageAPIWrapper": {"alpha_vantage.md": "https://python.langchain.com/v0.2/docs/integrations/tools/alpha_vantage/"}, "StackExchangeAPIWrapper": {"stackexchange.md": "https://python.langchain.com/v0.2/docs/integrations/tools/stackexchange/", "Stack Exchange": "https://python.langchain.com/v0.2/docs/integrations/providers/stackexchange/"}, "TextRequestsWrapper": {"Each tool wrapps a requests wrapper": "https://python.langchain.com/v0.2/docs/integrations/tools/requests/"}, "OpenWeatherMapAPIWrapper": {"openweathermap.md": "https://python.langchain.com/v0.2/docs/integrations/tools/openweathermap/", "OpenWeatherMap": "https://python.langchain.com/v0.2/docs/integrations/providers/openweathermap/"}, "get_from_env": {"Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/integrations/tools/passio_nutrition_ai/"}, "NutritionAI": {"Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/integrations/tools/passio_nutrition_ai/"}, "NutritionAIAPI": {"Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/integrations/tools/passio_nutrition_ai/"}, "PubmedQueryRun": {"pubmed.md": "https://python.langchain.com/v0.2/docs/integrations/tools/pubmed/"}, "ConversationBufferMemory": {"memorize.md": "https://python.langchain.com/v0.2/docs/integrations/tools/memorize/", "gradio_tools.md": "https://python.langchain.com/v0.2/docs/integrations/tools/gradio_tools/", "sceneXplain.md": "https://python.langchain.com/v0.2/docs/integrations/tools/sceneXplain/", "Adapted code from /docs/modules/agents/how_to/sharedmemory_for_tools": "https://python.langchain.com/v0.2/docs/integrations/tools/reddit_search/", 
"xata_chat_message_history.md": "https://python.langchain.com/v0.2/docs/integrations/memory/xata_chat_message_history/", "!pip3 install text-generation": "https://python.langchain.com/v0.2/docs/integrations/chat/llama2_chat/", "Or via the async API": "https://python.langchain.com/v0.2/docs/integrations/chat/nvidia_ai_endpoints/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_chat/", "Pip install necessary package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sap_hanavector/", "System parameter in NIBittensorLLM is optional but you can set whatever you want to perform with model": "https://python.langchain.com/v0.2/docs/integrations/llms/bittensor/", "Guardrails for Amazon Bedrock with trace": "https://python.langchain.com/v0.2/docs/integrations/llms/bedrock/", "[Beta] Memory": "https://python.langchain.com/v0.2/docs/modules/memory/.ipynb_checkpoints/index-checkpoint/", "adding_memory_chain_multiple_inputs.md": "https://python.langchain.com/v0.2/docs/modules/memory/adding_memory_chain_multiple_inputs/", "adding_memory.md": "https://python.langchain.com/v0.2/docs/modules/memory/adding_memory/", "Combined": "https://python.langchain.com/v0.2/docs/modules/memory/multiple_memory/", "Here it is by default set to \"AI\"": "https://python.langchain.com/v0.2/docs/modules/memory/conversational_customization/", "Conversation Buffer": "https://python.langchain.com/v0.2/docs/modules/memory/types/buffer/"}, "GradientLLM": {"memorize.md": "https://python.langchain.com/v0.2/docs/integrations/tools/memorize/", "Gradient": "https://python.langchain.com/v0.2/docs/integrations/providers/gradient/", "Improve the results by fine-tuning (optional) {#improve-the-results-by-fine-tuning-optional}": "https://python.langchain.com/v0.2/docs/integrations/llms/gradient/"}, "ElevenLabsText2SpeechTool": {"eleven_labs_tts.md": "https://python.langchain.com/v0.2/docs/integrations/tools/eleven_labs_tts/", "ElevenLabs": "https://python.langchain.com/v0.2/docs/integrations/providers/elevenlabs/"}, "BearlyInterpreterTool": {"Extract pdf content": "https://python.langchain.com/v0.2/docs/integrations/tools/bearly/"}, "VectorstoreIndexCreator": {"apify.md": "https://python.langchain.com/v0.2/docs/integrations/tools/apify/", "hugging_face_dataset.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/hugging_face_dataset/", "Create a vectorstore retriever from the loader": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/modern_treasury/", "image_captions.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/image_captions/", "see https://python.langchain.com/en/latest/modules/data_connection/getting_started.html for more details": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/figma/", "apify_dataset.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/apify_dataset/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/web_scraping/", "Retrievers": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/.ipynb_checkpoints/index-checkpoint/"}, "ApifyWrapper": {"apify.md": "https://python.langchain.com/v0.2/docs/integrations/tools/apify/", "Apify": "https://python.langchain.com/v0.2/docs/integrations/providers/apify/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/web_scraping/"}, "ZapierToolkit": {"get from 
https://platform.openai.com/": "https://python.langchain.com/v0.2/docs/integrations/tools/zapier/"}, "ZapierNLAWrapper": {"get from https://platform.openai.com/": "https://python.langchain.com/v0.2/docs/integrations/tools/zapier/"}, "SimpleSequentialChain": {"get from https://platform.openai.com/": "https://python.langchain.com/v0.2/docs/integrations/tools/zapier/", "LLM Hyperparameters": "https://python.langchain.com/v0.2/docs/integrations/callbacks/sagemaker_tracking/", "!pip3 install rebuff openai -U": "https://python.langchain.com/v0.2/docs/integrations/providers/rebuff/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/llms/predibase/", "Run the chain specifying only the input variable for the first chain.": "https://python.langchain.com/v0.2/docs/integrations/llms/edenai/", "magics to auto-reload external modules in case you are making changes to langchain while working on this notebook": "https://python.langchain.com/v0.2/docs/integrations/llms/replicate/"}, "TransformChain": {"get from https://platform.openai.com/": "https://python.langchain.com/v0.2/docs/integrations/tools/zapier/", "!pip3 install rebuff openai -U": "https://python.langchain.com/v0.2/docs/integrations/providers/rebuff/"}, "ZapierNLARunAction": {"get from https://platform.openai.com/": "https://python.langchain.com/v0.2/docs/integrations/tools/zapier/"}, "RivaASR": {"send data into the chain": "https://python.langchain.com/v0.2/docs/integrations/tools/nvidia_riva/"}, "RivaTTS": {"send data into the chain": "https://python.langchain.com/v0.2/docs/integrations/tools/nvidia_riva/"}, "RivaAudioEncoding": {"send data into the chain": "https://python.langchain.com/v0.2/docs/integrations/tools/nvidia_riva/"}, "AudioStream": {"send data into the chain": "https://python.langchain.com/v0.2/docs/integrations/tools/nvidia_riva/"}, "GoldenQueryAPIWrapper": {"golden_query.md": "https://python.langchain.com/v0.2/docs/integrations/tools/golden_query/", "Golden": "https://python.langchain.com/v0.2/docs/integrations/providers/golden/"}, "create_react_agent": {"arxiv.md": "https://python.langchain.com/v0.2/docs/integrations/tools/arxiv/", "Based on ReAct Agent": "https://python.langchain.com/v0.2/docs/integrations/tools/ionic_shopping/", "Streamlit": "https://python.langchain.com/v0.2/docs/integrations/callbacks/streamlit/", "Dataherald": "https://python.langchain.com/v0.2/docs/integrations/providers/.ipynb_checkpoints/dataherald-checkpoint/", "Define the neural network": "https://python.langchain.com/v0.2/docs/integrations/toolkits/python/", "Set environmental variables here": "https://python.langchain.com/v0.2/docs/integrations/toolkits/amadeus/", "System parameter in NIBittensorLLM is optional but you can set whatever you want to perform with model": "https://python.langchain.com/v0.2/docs/integrations/llms/bittensor/", "agent_with_memory_in_db.md": "https://python.langchain.com/v0.2/docs/modules/memory/agent_with_memory_in_db/", "agent_with_memory.md": "https://python.langchain.com/v0.2/docs/modules/memory/agent_with_memory/", "Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/max_iterations/"}, "ArxivAPIWrapper": {"arxiv.md": "https://python.langchain.com/v0.2/docs/integrations/tools/arxiv/"}, "DuckDuckGoSearchRun": {"ddg.md": "https://python.langchain.com/v0.2/docs/integrations/tools/ddg/", "Set your environment variables using os.environ": "https://python.langchain.com/v0.2/docs/integrations/toolkits/github/", "openai_assistants.md": 
"https://python.langchain.com/v0.2/docs/modules/agents/agent_types/openai_assistants/"}, "DuckDuckGoSearchAPIWrapper": {"ddg.md": "https://python.langchain.com/v0.2/docs/integrations/tools/ddg/"}, "SceneXplainTool": {"sceneXplain.md": "https://python.langchain.com/v0.2/docs/integrations/tools/sceneXplain/"}, "WolframAlphaAPIWrapper": {"wolfram_alpha.md": "https://python.langchain.com/v0.2/docs/integrations/tools/wolfram_alpha/", "Wolfram Alpha": "https://python.langchain.com/v0.2/docs/integrations/providers/wolfram_alpha/"}, "RunnableParallel": {"and some deps for this notebook": "https://python.langchain.com/v0.2/docs/integrations/tools/exa_search/", "QA with private data protection {#qa-with-private-data-protection}": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/qa_privacy_protection/", "import dotenv": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/streaming/", "Uncomment if you want to log to LangSmith": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/citations/", "retry.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/retry/", "> ChatPromptValue(messages=[HumanMessage(content='tell me a short joke about ice cream')])": "https://python.langchain.com/v0.2/docs/expression_language/get_started/", "The input schema of the chain is the input schema of its first part, the prompt.": "https://python.langchain.com/v0.2/docs/expression_language/interface/", "prompt_llm_parser.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/prompt_llm_parser/", "Remembers": "https://python.langchain.com/v0.2/docs/expression_language/how_to/message_history/", "Adding values to chain state {#adding-values-to-chain-state}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/assign/", "Formatting inputs & output {#formatting-inputs-output}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/parallel/", "Passing data through {#passing-data-through}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/passthrough/", "Chaining runnables {#chaining-runnables}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/sequence/"}, "ExaSearchRetriever": {"and some deps for this notebook": "https://python.langchain.com/v0.2/docs/integrations/tools/exa_search/", "exa_search.md": "https://python.langchain.com/v0.2/docs/integrations/providers/exa_search/"}, "TextContentsOptions": {"and some deps for this notebook": "https://python.langchain.com/v0.2/docs/integrations/tools/exa_search/"}, "OpenAIFunctionsAgent": {"and some deps for this notebook": "https://python.langchain.com/v0.2/docs/integrations/tools/exa_search/", "LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/callbacks/llmonitor/", "Install package": "https://python.langchain.com/v0.2/docs/integrations/toolkits/robocorp/"}, "EdenAiExplicitImageTool": {"edenai_tools.md": "https://python.langchain.com/v0.2/docs/integrations/tools/edenai_tools/", "Eden AI": "https://python.langchain.com/v0.2/docs/integrations/providers/edenai/"}, "EdenAiObjectDetectionTool": {"edenai_tools.md": "https://python.langchain.com/v0.2/docs/integrations/tools/edenai_tools/", "Eden AI": "https://python.langchain.com/v0.2/docs/integrations/providers/edenai/"}, "EdenAiParsingIDTool": {"edenai_tools.md": "https://python.langchain.com/v0.2/docs/integrations/tools/edenai_tools/", "Eden AI": 
"https://python.langchain.com/v0.2/docs/integrations/providers/edenai/"}, "EdenAiParsingInvoiceTool": {"edenai_tools.md": "https://python.langchain.com/v0.2/docs/integrations/tools/edenai_tools/", "Eden AI": "https://python.langchain.com/v0.2/docs/integrations/providers/edenai/"}, "EdenAiSpeechToTextTool": {"edenai_tools.md": "https://python.langchain.com/v0.2/docs/integrations/tools/edenai_tools/", "Eden AI": "https://python.langchain.com/v0.2/docs/integrations/providers/edenai/"}, "EdenAiTextModerationTool": {"edenai_tools.md": "https://python.langchain.com/v0.2/docs/integrations/tools/edenai_tools/", "Eden AI": "https://python.langchain.com/v0.2/docs/integrations/providers/edenai/"}, "EdenAiTextToSpeechTool": {"edenai_tools.md": "https://python.langchain.com/v0.2/docs/integrations/tools/edenai_tools/", "Eden AI": "https://python.langchain.com/v0.2/docs/integrations/providers/edenai/"}, "EdenAI": {"edenai_tools.md": "https://python.langchain.com/v0.2/docs/integrations/tools/edenai_tools/", "Eden AI": "https://python.langchain.com/v0.2/docs/integrations/providers/edenai/", "Run the chain specifying only the input variable for the first chain.": "https://python.langchain.com/v0.2/docs/integrations/llms/edenai/"}, "RedditSearchRun": {"Adapted code from /docs/modules/agents/how_to/sharedmemory_for_tools": "https://python.langchain.com/v0.2/docs/integrations/tools/reddit_search/"}, "RedditSearchAPIWrapper": {"Adapted code from /docs/modules/agents/how_to/sharedmemory_for_tools": "https://python.langchain.com/v0.2/docs/integrations/tools/reddit_search/"}, "RedditSearchSchema": {"Adapted code from /docs/modules/agents/how_to/sharedmemory_for_tools": "https://python.langchain.com/v0.2/docs/integrations/tools/reddit_search/"}, "StructuredChatAgent": {"Adapted code from /docs/modules/agents/how_to/sharedmemory_for_tools": "https://python.langchain.com/v0.2/docs/integrations/tools/reddit_search/"}, "ReadOnlySharedMemory": {"Adapted code from /docs/modules/agents/how_to/sharedmemory_for_tools": "https://python.langchain.com/v0.2/docs/integrations/tools/reddit_search/"}, "YouSearchTool": {"For use in Chaining section": "https://python.langchain.com/v0.2/docs/integrations/tools/you/"}, "ShellTool": {"bash.md": "https://python.langchain.com/v0.2/docs/integrations/tools/bash/"}, "PolygonAggregates": {"Get the last quote for ticker": "https://python.langchain.com/v0.2/docs/integrations/tools/polygon/"}, "PolygonFinancials": {"Get the last quote for ticker": "https://python.langchain.com/v0.2/docs/integrations/tools/polygon/"}, "PolygonLastQuote": {"Get the last quote for ticker": "https://python.langchain.com/v0.2/docs/integrations/tools/polygon/"}, "PolygonTickerNews": {"Get the last quote for ticker": "https://python.langchain.com/v0.2/docs/integrations/tools/polygon/"}, "PolygonAPIWrapper": {"Get the last quote for ticker": "https://python.langchain.com/v0.2/docs/integrations/tools/polygon/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/integrations/toolkits/polygon/"}, "PolygonAggregatesSchema": {"Get the last quote for ticker": "https://python.langchain.com/v0.2/docs/integrations/tools/polygon/"}, "FileManagementToolkit": {"We'll make a temporary directory to avoid clutter": "https://python.langchain.com/v0.2/docs/integrations/tools/filesystem/"}, "BraveSearch": {"brave_search.md": "https://python.langchain.com/v0.2/docs/integrations/tools/brave_search/", "Brave Search": "https://python.langchain.com/v0.2/docs/integrations/providers/brave_search/"}, 
"RedisChatMessageHistory": {"redis_chat_message_history.md": "https://python.langchain.com/v0.2/docs/integrations/memory/redis_chat_message_history/", "agent_with_memory_in_db.md": "https://python.langchain.com/v0.2/docs/modules/memory/agent_with_memory_in_db/", "Remembers": "https://python.langchain.com/v0.2/docs/expression_language/how_to/message_history/"}, "RunnableWithMessageHistory": {"redis_chat_message_history.md": "https://python.langchain.com/v0.2/docs/integrations/memory/redis_chat_message_history/", "# Automatically restart kernel after installs so that your environment can access the new packages": "https://python.langchain.com/v0.2/docs/integrations/memory/google_sql_mssql/", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/integrations/memory/sqlite/", "Optionally, specify your own session_state key for storing messages": "https://python.langchain.com/v0.2/docs/integrations/memory/streamlit_chat_message_history/", "copy from tidb cloud console": "https://python.langchain.com/v0.2/docs/integrations/memory/tidb_chat_message_history/", "import dotenv": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/chat_history/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/chatbots/memory_management/", "agent_with_memory_in_db.md": "https://python.langchain.com/v0.2/docs/modules/memory/agent_with_memory_in_db/", "agent_with_memory.md": "https://python.langchain.com/v0.2/docs/modules/memory/agent_with_memory/", "Quickstart {#quickstart}": "https://python.langchain.com/v0.2/docs/modules/agents/quick_start/", "Remembers": "https://python.langchain.com/v0.2/docs/expression_language/how_to/message_history/"}, "ElasticsearchChatMessageHistory": {"If using Elastic Cloud:": "https://python.langchain.com/v0.2/docs/integrations/memory/elasticsearch_chat_message_history/", "Elasticsearch": "https://python.langchain.com/v0.2/docs/integrations/providers/elasticsearch/"}, "UpstashRedisChatMessageHistory": {"upstash_redis_chat_message_history.md": "https://python.langchain.com/v0.2/docs/integrations/memory/upstash_redis_chat_message_history/", "Upstash Redis": "https://python.langchain.com/v0.2/docs/integrations/providers/upstash/"}, "SingleStoreDBChatMessageHistory": {"singlestoredb_chat_message_history.md": "https://python.langchain.com/v0.2/docs/integrations/memory/singlestoredb_chat_message_history/", "SingleStoreDB": "https://python.langchain.com/v0.2/docs/integrations/providers/singlestoredb/"}, "PostgresChatMessageHistory": {"postgres_chat_message_history.md": "https://python.langchain.com/v0.2/docs/integrations/memory/postgres_chat_message_history/"}, "MomentoChatMessageHistory": {"momento_chat_message_history.md": "https://python.langchain.com/v0.2/docs/integrations/memory/momento_chat_message_history/", "Momento": "https://python.langchain.com/v0.2/docs/integrations/providers/momento/"}, "XataChatMessageHistory": {"xata_chat_message_history.md": "https://python.langchain.com/v0.2/docs/integrations/memory/xata_chat_message_history/", "Xata": "https://python.langchain.com/v0.2/docs/integrations/providers/xata/"}, "XataVectorStore": {"xata_chat_message_history.md": "https://python.langchain.com/v0.2/docs/integrations/memory/xata_chat_message_history/", "xata.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/xata/"}, "create_retriever_tool": {"xata_chat_message_history.md": 
"https://python.langchain.com/v0.2/docs/integrations/memory/xata_chat_message_history/", "Quickstart": "https://python.langchain.com/v0.2/docs/get_started/quickstart/", "conversational_retrieval_agents.md": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/conversational_retrieval_agents/", "Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/sql/agents/", "Quickstart {#quickstart}": "https://python.langchain.com/v0.2/docs/modules/agents/quick_start/", "Load in document to retrieve over": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/agent_structured/"}, "CassandraChatMessageHistory": {"cassandra_chat_message_history.md": "https://python.langchain.com/v0.2/docs/integrations/memory/cassandra_chat_message_history/", "Cassandra": "https://python.langchain.com/v0.2/docs/integrations/providers/cassandra/"}, "SQLChatMessageHistory": {"os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/integrations/memory/sqlite/", "SQLite": "https://python.langchain.com/v0.2/docs/integrations/providers/sqlite/"}, "MotorheadMemory": {"loads previous state from Mot\u00f6rhead \ud83e\udd18": "https://python.langchain.com/v0.2/docs/integrations/memory/motorhead_memory/", "Mot\u00f6rhead": "https://python.langchain.com/v0.2/docs/integrations/providers/motorhead/"}, "AstraDBChatMessageHistory": {"astradb_chat_message_history.md": "https://python.langchain.com/v0.2/docs/integrations/memory/astradb_chat_message_history/", "Astra DB": "https://python.langchain.com/v0.2/docs/integrations/providers/astradb/"}, "StreamlitChatMessageHistory": {"Optionally, specify your own session_state key for storing messages": "https://python.langchain.com/v0.2/docs/integrations/memory/streamlit_chat_message_history/", "Streamlit": "https://python.langchain.com/v0.2/docs/integrations/providers/streamlit/"}, "Neo4jChatMessageHistory": {"neo4j_chat_message_history.md": "https://python.langchain.com/v0.2/docs/integrations/memory/neo4j_chat_message_history/"}, "TiDBChatMessageHistory": {"copy from tidb cloud console": "https://python.langchain.com/v0.2/docs/integrations/memory/tidb_chat_message_history/", "TiDB": "https://python.langchain.com/v0.2/docs/integrations/providers/tidb/"}, "RocksetChatMessageHistory": {"rockset_chat_message_history.md": "https://python.langchain.com/v0.2/docs/integrations/memory/rockset_chat_message_history/", "Rockset": "https://python.langchain.com/v0.2/docs/integrations/providers/rockset/"}, "HuggingFaceTextGenInference": {"setup tools": "https://python.langchain.com/v0.2/docs/integrations/chat/huggingface/", "!pip3 install text-generation": "https://python.langchain.com/v0.2/docs/integrations/chat/llama2_chat/"}, "HuggingFaceEndpoint": {"setup tools": "https://python.langchain.com/v0.2/docs/integrations/chat/huggingface/", "get a token: https://huggingface.co/docs/api-inference/quicktour#get-your-api-token": "https://python.langchain.com/v0.2/docs/integrations/llms/huggingface_endpoint/"}, "HuggingFaceHub": {"setup tools": "https://python.langchain.com/v0.2/docs/integrations/chat/huggingface/", "Set environmental variables here": "https://python.langchain.com/v0.2/docs/integrations/toolkits/amadeus/", "Define callback handlers by subclassing BaseModerationCallbackHandler": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/amazon_comprehend_chain/"}, "format_log_to_str": {"setup tools": "https://python.langchain.com/v0.2/docs/integrations/chat/mlx/"}, 
"ReActJsonSingleInputOutputParser": {"setup tools": "https://python.langchain.com/v0.2/docs/integrations/chat/mlx/", "Set environmental variables here": "https://python.langchain.com/v0.2/docs/integrations/toolkits/amadeus/"}, "render_text_description": {"setup tools": "https://python.langchain.com/v0.2/docs/integrations/chat/mlx/", "If you'd like to use LangSmith, uncomment the below:": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/prompting/"}, "AzureMLChatOnlineEndpoint": {"azureml_chat_endpoint.md": "https://python.langchain.com/v0.2/docs/integrations/chat/azureml_chat_endpoint/"}, "AzureMLEndpointApiType": {"azureml_chat_endpoint.md": "https://python.langchain.com/v0.2/docs/integrations/chat/azureml_chat_endpoint/", "azure_ml.md": "https://python.langchain.com/v0.2/docs/integrations/llms/azure_ml/"}, "CustomOpenAIChatContentFormatter": {"azureml_chat_endpoint.md": "https://python.langchain.com/v0.2/docs/integrations/chat/azureml_chat_endpoint/"}, "ChatKinetica": {"Install Langchain community and core packages": "https://python.langchain.com/v0.2/docs/integrations/chat/kinetica/", "Kinetica": "https://python.langchain.com/v0.2/docs/integrations/providers/kinetica/"}, "KineticaSqlOutputParser": {"Install Langchain community and core packages": "https://python.langchain.com/v0.2/docs/integrations/chat/kinetica/"}, "KineticaSqlResponse": {"Install Langchain community and core packages": "https://python.langchain.com/v0.2/docs/integrations/chat/kinetica/"}, "PaiEasChatEndpoint": {"alibaba_cloud_pai_eas.md": "https://python.langchain.com/v0.2/docs/integrations/chat/alibaba_cloud_pai_eas/", "Alibaba Cloud": "https://python.langchain.com/v0.2/docs/integrations/providers/alibaba_cloud/"}, "ChatFireworks": {"Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/chat/fireworks/", "structured_output.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/structured_output/", "response_metadata.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/response_metadata/"}, "ChatOctoAI": {"octoai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/octoai/"}, "ChatDeepInfra": {"get a new token: https://deepinfra.com/login?from=%2Fdash": "https://python.langchain.com/v0.2/docs/integrations/chat/deepinfra/", "DeepInfra": "https://python.langchain.com/v0.2/docs/integrations/providers/deepinfra/"}, "StreamingStdOutCallbackHandler": {"get a new token: https://deepinfra.com/login?from=%2Fdash": "https://python.langchain.com/v0.2/docs/integrations/chat/deepinfra/", "litellm.md": "https://python.langchain.com/v0.2/docs/integrations/chat/litellm/", "Let\u2019s try out LLAMA model offered on EverlyAI Hosted Endpoints {#lets-try-out-llama-model-offered-on-everlyai-hosted-endpoints}": "https://python.langchain.com/v0.2/docs/integrations/chat/everlyai/", "gpt_router.md": "https://python.langchain.com/v0.2/docs/integrations/chat/gpt_router/", "litellm_router.md": "https://python.langchain.com/v0.2/docs/integrations/chat/litellm_router/", "zhipuai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/zhipuai/", "yuan2.md": "https://python.langchain.com/v0.2/docs/integrations/chat/yuan2/", "GPT4All": "https://python.langchain.com/v0.2/docs/integrations/providers/gpt4all/", "arthur_tracking.md": "https://python.langchain.com/v0.2/docs/integrations/providers/arthur_tracking/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_chat/", "textgen.md": 
"https://python.langchain.com/v0.2/docs/integrations/llms/textgen/", "Callbacks support token-wise streaming": "https://python.langchain.com/v0.2/docs/integrations/llms/llamacpp/", "Guardrails for Amazon Bedrock with trace": "https://python.langchain.com/v0.2/docs/integrations/llms/bedrock/", "Note importing TitanTakeoffPro instead of TitanTakeoff will work as well both use same object under the hood": "https://python.langchain.com/v0.2/docs/integrations/llms/titan_takeoff/", "Run the chain specifying only the input variable for the first chain.": "https://python.langchain.com/v0.2/docs/integrations/llms/edenai/", "ctransformers.md": "https://python.langchain.com/v0.2/docs/integrations/llms/ctransformers/", "get a token: https://huggingface.co/docs/api-inference/quicktour#get-your-api-token": "https://python.langchain.com/v0.2/docs/integrations/llms/huggingface_endpoint/", "magics to auto-reload external modules in case you are making changes to langchain while working on this notebook": "https://python.langchain.com/v0.2/docs/integrations/llms/replicate/", "Download a llamafile from HuggingFace": "https://python.langchain.com/v0.2/docs/guides/development/local_llms/"}, "ToolsOutputParser": {"open ../../../static/img/brand/wordmark.png as base64 str": "https://python.langchain.com/v0.2/docs/integrations/chat/anthropic/"}, "ChatGroq": {"groq.md": "https://python.langchain.com/v0.2/docs/integrations/chat/groq/", "structured_output.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/structured_output/", "response_metadata.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/response_metadata/"}, "ChatLiteLLM": {"litellm.md": "https://python.langchain.com/v0.2/docs/integrations/chat/litellm/"}, "CallbackManager": {"litellm.md": "https://python.langchain.com/v0.2/docs/integrations/chat/litellm/", "gpt_router.md": "https://python.langchain.com/v0.2/docs/integrations/chat/gpt_router/", "litellm_router.md": "https://python.langchain.com/v0.2/docs/integrations/chat/litellm_router/", "zhipuai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/zhipuai/", "Callbacks support token-wise streaming": "https://python.langchain.com/v0.2/docs/integrations/llms/llamacpp/", "Note importing TitanTakeoffPro instead of TitanTakeoff will work as well both use same object under the hood": "https://python.langchain.com/v0.2/docs/integrations/llms/titan_takeoff/", "Download a llamafile from HuggingFace": "https://python.langchain.com/v0.2/docs/guides/development/local_llms/"}, "LlamaEdgeChatService": {"service url": "https://python.langchain.com/v0.2/docs/integrations/chat/llama_edge/"}, "HarmBlockThreshold": {"Note that each chunk may contain more than one \"token\"": "https://python.langchain.com/v0.2/docs/integrations/chat/google_generative_ai/", "for running these examples in the notebook:": "https://python.langchain.com/v0.2/docs/integrations/chat/google_vertex_ai_palm/", "google_ai.md": "https://python.langchain.com/v0.2/docs/integrations/llms/google_ai/"}, "HarmCategory": {"Note that each chunk may contain more than one \"token\"": "https://python.langchain.com/v0.2/docs/integrations/chat/google_generative_ai/", "for running these examples in the notebook:": "https://python.langchain.com/v0.2/docs/integrations/chat/google_vertex_ai_palm/", "google_ai.md": "https://python.langchain.com/v0.2/docs/integrations/llms/google_ai/"}, "OllamaFunctions": {"Schema": "https://python.langchain.com/v0.2/docs/integrations/chat/ollama_functions/", "Ollama": 
"https://python.langchain.com/v0.2/docs/integrations/providers/ollama/"}, "create_extraction_chain": {"Schema": "https://python.langchain.com/v0.2/docs/integrations/chat/ollama_functions/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/web_scraping/"}, "VolcEngineMaasChat": {"Install the package": "https://python.langchain.com/v0.2/docs/integrations/chat/volcengine_maas/"}, "ChatLlamaAPI": {"Replace 'Your_API_Token' with your actual API token": "https://python.langchain.com/v0.2/docs/integrations/chat/llama_api/"}, "create_tagging_chain": {"Replace 'Your_API_Token' with your actual API token": "https://python.langchain.com/v0.2/docs/integrations/chat/llama_api/"}, "ChatKonko": {"Konko {#konko}": "https://python.langchain.com/v0.2/docs/integrations/chat/konko/"}, "create_structured_runnable": {"for running these examples in the notebook:": "https://python.langchain.com/v0.2/docs/integrations/chat/google_vertex_ai_palm/"}, "MLXPipeline": {"setup tools": "https://python.langchain.com/v0.2/docs/integrations/chat/mlx/", "mlx_pipelines.md": "https://python.langchain.com/v0.2/docs/integrations/llms/mlx_pipelines/"}, "ChatMLX": {"setup tools": "https://python.langchain.com/v0.2/docs/integrations/chat/mlx/"}, "GigaChat": {"gigachat.md": "https://python.langchain.com/v0.2/docs/integrations/llms/gigachat/", "Salute Devices": "https://python.langchain.com/v0.2/docs/integrations/providers/salute_devices/"}, "JinaChat": {"get a chat completion from the formatted messages": "https://python.langchain.com/v0.2/docs/integrations/chat/jinachat/"}, "SystemMessagePromptTemplate": {"get a chat completion from the formatted messages": "https://python.langchain.com/v0.2/docs/integrations/chat/vllm/", "Install all needed libraries": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/yellowbrick/", "see https://python.langchain.com/en/latest/modules/data_connection/getting_started.html for more details": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/figma/", "Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/sql/agents/", "[Beta] Memory": "https://python.langchain.com/v0.2/docs/modules/memory/.ipynb_checkpoints/index-checkpoint/", "Prompts": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/.ipynb_checkpoints/prompts-checkpoint/"}, "ChatOllama": {"LangChain supports many other chat models. 
Here, we're using Ollama": "https://python.langchain.com/v0.2/docs/integrations/chat/ollama/", "Ollama": "https://python.langchain.com/v0.2/docs/integrations/providers/ollama/", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/v0.2/docs/use_cases/code_understanding/", "Quickstart": "https://python.langchain.com/v0.2/docs/modules/model_io/.ipynb_checkpoints/quick_start-checkpoint/", "Model I/O": "https://python.langchain.com/v0.2/docs/modules/model_io/index/"}, "get_openai_callback": {"azure_chat_openai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/azure_chat_openai/", "You can kick off concurrent runs from within the context manager": "https://python.langchain.com/v0.2/docs/modules/callbacks/token_counting/", "!pip install -qU langchain-openai": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/token_usage_tracking/", "token_usage_tracking.md": "https://python.langchain.com/v0.2/docs/modules/model_io/llms/token_usage_tracking/", "Run custom functions {#run-custom-functions}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/functions/"}, "ChatEverlyAI": {"Let\u2019s try out LLAMA model offered on EverlyAI Hosted Endpoints {#lets-try-out-llama-model-offered-on-everlyai-hosted-endpoints}": "https://python.langchain.com/v0.2/docs/integrations/chat/everlyai/"}, "GPTRouter": {"gpt_router.md": "https://python.langchain.com/v0.2/docs/integrations/chat/gpt_router/"}, "GPTRouterModel": {"gpt_router.md": "https://python.langchain.com/v0.2/docs/integrations/chat/gpt_router/"}, "ChatLiteLLMRouter": {"litellm_router.md": "https://python.langchain.com/v0.2/docs/integrations/chat/litellm_router/"}, "ChatFriendli": {"friendli.md": "https://python.langchain.com/v0.2/docs/integrations/chat/friendli/"}, "ChatMistralAI": {"If api_key is not passed, default behavior is to use the `MISTRAL_API_KEY` environment variable.": "https://python.langchain.com/v0.2/docs/integrations/chat/mistralai/", "mistralai.md": "https://python.langchain.com/v0.2/docs/integrations/providers/mistralai/", "Install a model capable of tool calling": "https://python.langchain.com/v0.2/docs/use_cases/extraction/quickstart/", "structured_output.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/structured_output/", "response_metadata.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/response_metadata/"}, "ChatZhipuAI": {"zhipuai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/zhipuai/"}, "create_json_chat_agent": {"zhipuai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/zhipuai/", "Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/agent_types/json_agent/"}, "ChatBaichuan": {"baichuan.md": "https://python.langchain.com/v0.2/docs/integrations/chat/baichuan/", "Baichuan": "https://python.langchain.com/v0.2/docs/integrations/providers/baichuan/"}, "Llama2Chat": {"!pip3 install text-generation": "https://python.langchain.com/v0.2/docs/integrations/chat/llama2_chat/"}, "LlamaCpp": {"!pip3 install text-generation": "https://python.langchain.com/v0.2/docs/integrations/chat/llama2_chat/", "Llama.cpp": "https://python.langchain.com/v0.2/docs/integrations/providers/llamacpp/", "Callbacks support token-wise streaming": "https://python.langchain.com/v0.2/docs/integrations/llms/llamacpp/", "Download a llamafile from HuggingFace": "https://python.langchain.com/v0.2/docs/guides/development/local_llms/", "Make sure the model path is correct for your 
system!": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/local_retrieval_qa/"}, "QianfanChatEndpoint": {"baidu_qianfan_endpoint.md": "https://python.langchain.com/v0.2/docs/integrations/chat/baidu_qianfan_endpoint/", "ernie.md": "https://python.langchain.com/v0.2/docs/integrations/chat/ernie/", "Baidu": "https://python.langchain.com/v0.2/docs/integrations/providers/baidu/"}, "ChatEdenAI": {"edenai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/edenai/", "Eden AI": "https://python.langchain.com/v0.2/docs/integrations/providers/edenai/"}, "ErnieBotChat": {"ernie.md": "https://python.langchain.com/v0.2/docs/integrations/chat/ernie/"}, "ChatHunyuan": {"tencent_hunyuan.md": "https://python.langchain.com/v0.2/docs/integrations/chat/tencent_hunyuan/", "Tencent": "https://python.langchain.com/v0.2/docs/integrations/providers/tencent/"}, "MiniMaxChat": {"minimax.md": "https://python.langchain.com/v0.2/docs/integrations/chat/minimax/", "Minimax": "https://python.langchain.com/v0.2/docs/integrations/providers/minimax/"}, "ChatYuan2": {"yuan2.md": "https://python.langchain.com/v0.2/docs/integrations/chat/yuan2/"}, "ChatTongyi": {"Install the package": "https://python.langchain.com/v0.2/docs/integrations/chat/tongyi/", "Alibaba Cloud": "https://python.langchain.com/v0.2/docs/integrations/providers/alibaba_cloud/"}, "PromptLayerChatOpenAI": {"promptlayer_chatopenai.md": "https://python.langchain.com/v0.2/docs/integrations/chat/promptlayer_chatopenai/", "PromptLayer": "https://python.langchain.com/v0.2/docs/integrations/providers/promptlayer/"}, "ChatSparkLLM": {"sparkllm.md": "https://python.langchain.com/v0.2/docs/integrations/chat/sparkllm/"}, "MoonshotChat": {"Generate your api key from: https://platform.moonshot.cn/console/api-keys": "https://python.langchain.com/v0.2/docs/integrations/chat/moonshot/"}, "ChatDappierAI": {"dappier.md": "https://python.langchain.com/v0.2/docs/integrations/chat/dappier/"}, "ChatMaritalk": {"Loading the COMVEST 2024 notice": "https://python.langchain.com/v0.2/docs/integrations/chat/maritalk/"}, "OnlinePDFLoader": {"Loading the COMVEST 2024 notice": "https://python.langchain.com/v0.2/docs/integrations/chat/maritalk/", "PDF": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/pdf/"}, "load_qa_chain": {"Loading the COMVEST 2024 notice": "https://python.langchain.com/v0.2/docs/integrations/chat/maritalk/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_chat/", "You can store your OPENAI_API_KEY in a .env file as well": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/amazon_textract/", "sagemaker.md": "https://python.langchain.com/v0.2/docs/integrations/llms/sagemaker/", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/v0.2/docs/use_cases/code_understanding/", "adding_memory_chain_multiple_inputs.md": "https://python.langchain.com/v0.2/docs/modules/memory/adding_memory_chain_multiple_inputs/"}, "ChatPremAI": {"First step is to set up the env variable.": "https://python.langchain.com/v0.2/docs/integrations/chat/premai/", "PremAI": "https://python.langchain.com/v0.2/docs/integrations/providers/premai/"}, "ChatAnyscale": {"Let\u2019s try out each model offered on Anyscale Endpoints {#lets-try-out-each-model-offered-on-anyscale-endpoints}": "https://python.langchain.com/v0.2/docs/integrations/chat/anyscale/", "Anyscale": "https://python.langchain.com/v0.2/docs/integrations/providers/anyscale/"}, 
"ChatYandexGPT": {"yandex.md": "https://python.langchain.com/v0.2/docs/integrations/chat/yandex/", "Yandex": "https://python.langchain.com/v0.2/docs/integrations/providers/yandex/"}, "ChatPerplexity": {"perplexity.md": "https://python.langchain.com/v0.2/docs/integrations/chat/perplexity/"}, "ChatAnthropicTools": {"anthropic_functions.md": "https://python.langchain.com/v0.2/docs/integrations/chat/anthropic_functions/"}, "ChatMessage": {"Or via the async API": "https://python.langchain.com/v0.2/docs/integrations/chat/nvidia_ai_endpoints/"}, "ConversationChain": {"Or via the async API": "https://python.langchain.com/v0.2/docs/integrations/chat/nvidia_ai_endpoints/", "Guardrails for Amazon Bedrock with trace": "https://python.langchain.com/v0.2/docs/integrations/llms/bedrock/", "!python -m spacy download en_core_web_lg": "https://python.langchain.com/v0.2/docs/modules/memory/custom_memory/", "Combined": "https://python.langchain.com/v0.2/docs/modules/memory/multiple_memory/", "Here it is by default set to \"AI\"": "https://python.langchain.com/v0.2/docs/modules/memory/conversational_customization/", "kg.md": "https://python.langchain.com/v0.2/docs/modules/memory/types/kg/", "We can see here that the buffer is updated": "https://python.langchain.com/v0.2/docs/modules/memory/types/token_buffer/", "Entity": "https://python.langchain.com/v0.2/docs/modules/memory/types/entity_summary_memory/", "Conversation Summary": "https://python.langchain.com/v0.2/docs/modules/memory/types/summary/", "Backed by a Vector Store": "https://python.langchain.com/v0.2/docs/modules/memory/types/vectorstore_retriever_memory/", "Conversation Buffer Window": "https://python.langchain.com/v0.2/docs/modules/memory/types/buffer_window/", "Conversation Buffer": "https://python.langchain.com/v0.2/docs/modules/memory/types/buffer/", "We can see here that there is a summary of the conversation and then some previous interactions": "https://python.langchain.com/v0.2/docs/modules/memory/types/summary_buffer/"}, "DeepEvalCallbackHandler": {"Here we want to make sure the answer is minimally relevant": "https://python.langchain.com/v0.2/docs/integrations/callbacks/confident/", "Confident AI": "https://python.langchain.com/v0.2/docs/integrations/providers/confident/"}, "LLMonitorCallbackHandler": {"LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/providers/llmonitor/"}, "identify": {"LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/callbacks/llmonitor/"}, "ContextCallbackHandler": {"context.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/context/", "Context": "https://python.langchain.com/v0.2/docs/integrations/providers/context/"}, "FiddlerCallbackHandler": {"Fiddler project and model names, used for model registration": "https://python.langchain.com/v0.2/docs/integrations/callbacks/fiddler/", "Fiddler": "https://python.langchain.com/v0.2/docs/integrations/providers/fiddler/"}, "FewShotChatMessagePromptTemplate": {"Fiddler project and model names, used for model registration": "https://python.langchain.com/v0.2/docs/integrations/callbacks/fiddler/", "This is a prompt template used to format each individual example.": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/few_shot_examples_chat/"}, "LabelStudioCallbackHandler": {"labelstudio.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/labelstudio/", "Label Studio": "https://python.langchain.com/v0.2/docs/integrations/providers/labelstudio/"}, "CometTracer": {"Connect to Comet if no API Key is set": 
"https://python.langchain.com/v0.2/docs/integrations/callbacks/comet_tracing/", "os.environ[\"OPENAI_ORGANIZATION\"] = \"...\"": "https://python.langchain.com/v0.2/docs/integrations/providers/comet_tracking/"}, "ArgillaCallbackHandler": {"argilla.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/argilla/", "Argilla": "https://python.langchain.com/v0.2/docs/integrations/providers/argilla/"}, "StdOutCallbackHandler": {"argilla.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/argilla/", "os.environ[\"OPENAI_ORGANIZATION\"] = \"...\"": "https://python.langchain.com/v0.2/docs/integrations/providers/comet_tracking/", "scenario 1 - LLM": "https://python.langchain.com/v0.2/docs/integrations/providers/aim_tracking/", "os.environ[\"OPENAI_API_KEY\"] = \"\"": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracking/", "Setup and use the ClearML Callback": "https://python.langchain.com/v0.2/docs/integrations/providers/clearml_tracking/", "install the opaqueprompts and langchain packages": "https://python.langchain.com/v0.2/docs/integrations/llms/opaqueprompts/", "Callbacks": "https://python.langchain.com/v0.2/docs/modules/callbacks/index/", "this chain will both print to stdout (because verbose=True) and write to 'output.log'": "https://python.langchain.com/v0.2/docs/modules/callbacks/filecallbackhandler/"}, "PromptLayerCallbackHandler": {"promptlayer.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/promptlayer/", "PromptLayer": "https://python.langchain.com/v0.2/docs/integrations/providers/promptlayer/"}, "GPT4All": {"promptlayer.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/promptlayer/", "GPT4All": "https://python.langchain.com/v0.2/docs/integrations/providers/gpt4all/", "Callbacks support token-wise streaming": "https://python.langchain.com/v0.2/docs/integrations/llms/gpt4all/", "Download a llamafile from HuggingFace": "https://python.langchain.com/v0.2/docs/guides/development/local_llms/", "Make sure the model path is correct for your system!": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/local_retrieval_qa/"}, "StreamlitCallbackHandler": {"Streamlit": "https://python.langchain.com/v0.2/docs/integrations/providers/streamlit/", "GPT4All": "https://python.langchain.com/v0.2/docs/integrations/providers/gpt4all/"}, "MultiQueryRetriever": {"1. Vanilla RAG {#vanilla-rag-1}": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_summary/", "Build a sample vectorDB": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/MultiQueryRetriever/"}, "UpTrainCallbackHandler": {"1. 
Vanilla RAG {#vanilla-rag-1}": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "UpTrain": "https://python.langchain.com/v0.2/docs/integrations/providers/uptrain/"}, "TrubricsCallbackHandler": {"trubrics.md": "https://python.langchain.com/v0.2/docs/integrations/callbacks/trubrics/", "Trubrics": "https://python.langchain.com/v0.2/docs/integrations/providers/trubrics/"}, "InfinoCallbackHandler": {"Install necessary dependencies.": "https://python.langchain.com/v0.2/docs/integrations/callbacks/infino/", "Infino": "https://python.langchain.com/v0.2/docs/integrations/providers/infino/"}, "load_summarize_chain": {"Install necessary dependencies.": "https://python.langchain.com/v0.2/docs/integrations/callbacks/infino/", "see https://python.langchain.com/v0.2/docs/use_cases/summarization for more details": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/larksuite/", "To make the caching really obvious, lets use a slower model.": "https://python.langchain.com/v0.2/docs/integrations/llms/llm_caching/", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/v0.2/docs/use_cases/summarization/"}, "FigmaFileLoader": {"Figma": "https://python.langchain.com/v0.2/docs/integrations/providers/figma/", "see https://python.langchain.com/en/latest/modules/data_connection/getting_started.html for more details": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/figma/"}, "Baseten": {"Baseten": "https://python.langchain.com/v0.2/docs/integrations/providers/baseten/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/llms/baseten/"}, "WeatherDataLoader": {"Weather": "https://python.langchain.com/v0.2/docs/integrations/providers/weather/", "Set API key either by passing it in to constructor directly": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/weather/"}, "Tair": {"Tair": "https://python.langchain.com/v0.2/docs/integrations/providers/tair/", "Alibaba Cloud": "https://python.langchain.com/v0.2/docs/integrations/providers/alibaba_cloud/", "drop first if index already exists": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tair/"}, "CollegeConfidentialLoader": {"College Confidential": "https://python.langchain.com/v0.2/docs/integrations/providers/college_confidential/", "college_confidential.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/college_confidential/"}, "RWKV": {"RWKV-4": "https://python.langchain.com/v0.2/docs/integrations/providers/rwkv/"}, "LakeFSLoader": {"lakeFS": "https://python.langchain.com/v0.2/docs/integrations/providers/lakefs/", "lakefs.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/lakefs/"}, "FaunaLoader": {"Fauna": "https://python.langchain.com/v0.2/docs/integrations/providers/fauna/", "fauna.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/fauna/"}, "OCIGenAI": {"Oracle Cloud Infrastructure (OCI)": "https://python.langchain.com/v0.2/docs/integrations/providers/oci/", "use default authN method API-key": "https://python.langchain.com/v0.2/docs/integrations/llms/oci_generative_ai/"}, "OCIModelDeploymentVLLM": {"Oracle Cloud Infrastructure (OCI)": "https://python.langchain.com/v0.2/docs/integrations/providers/oci/", "Set authentication through ads": "https://python.langchain.com/v0.2/docs/integrations/llms/oci_model_deployment_endpoint/"}, "OCIModelDeploymentTGI": {"Oracle Cloud Infrastructure (OCI)": 
"https://python.langchain.com/v0.2/docs/integrations/providers/oci/", "Set authentication through ads": "https://python.langchain.com/v0.2/docs/integrations/llms/oci_model_deployment_endpoint/"}, "Lantern": {"Lantern": "https://python.langchain.com/v0.2/docs/integrations/providers/lantern/", "Pip install necessary package {#pip-install-necessary-package}": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/lantern/"}, "SQLiteCache": {"From LangChain, import standard modules for prompting.": "https://python.langchain.com/v0.2/docs/integrations/providers/dspy/", "To make the caching really obvious, lets use a slower model.": "https://python.langchain.com/v0.2/docs/modules/model_io/llms/llm_caching/", "": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/chat_model_caching/"}, "set_llm_cache": {"From LangChain, import standard modules for prompting.": "https://python.langchain.com/v0.2/docs/integrations/providers/dspy/", "MongoDB Atlas": "https://python.langchain.com/v0.2/docs/integrations/providers/mongodb_atlas/", "Astra DB": "https://python.langchain.com/v0.2/docs/integrations/providers/astradb/", "Redis": "https://python.langchain.com/v0.2/docs/integrations/providers/redis/", "Cassandra": "https://python.langchain.com/v0.2/docs/integrations/providers/cassandra/", "Momento": "https://python.langchain.com/v0.2/docs/integrations/providers/momento/", "To make the caching really obvious, lets use a slower model.": "https://python.langchain.com/v0.2/docs/modules/model_io/llms/llm_caching/", "": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/chat_model_caching/"}, "Fireworks": {"Fireworks": "https://python.langchain.com/v0.2/docs/integrations/providers/fireworks/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/llms/fireworks/", "Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/agent_types/self_ask_with_search/"}, "DropboxLoader": {"Dropbox": "https://python.langchain.com/v0.2/docs/integrations/providers/dropbox/", "Generate access token: https://www.dropbox.com/developers/apps/create.": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/dropbox/"}, "ForefrontAI": {"ForefrontAI": "https://python.langchain.com/v0.2/docs/integrations/providers/forefrontai/", "get a new token: https://docs.forefront.ai/forefront/api-reference/authentication": "https://python.langchain.com/v0.2/docs/integrations/llms/forefrontai/"}, "CometCallbackHandler": {"os.environ[\"OPENAI_ORGANIZATION\"] = \"...\"": "https://python.langchain.com/v0.2/docs/integrations/providers/comet_tracking/"}, "CTransformers": {"C Transformers": "https://python.langchain.com/v0.2/docs/integrations/providers/ctransformers/", "ctransformers.md": "https://python.langchain.com/v0.2/docs/integrations/llms/ctransformers/"}, "BiliBiliLoader": {"BiliBili": "https://python.langchain.com/v0.2/docs/integrations/providers/bilibili/", "bilibili.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/bilibili/"}, "TencentCOSDirectoryLoader": {"Tencent": "https://python.langchain.com/v0.2/docs/integrations/providers/tencent/", "tencent_cos_directory.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/tencent_cos_directory/"}, "TencentCOSFileLoader": {"Tencent": "https://python.langchain.com/v0.2/docs/integrations/providers/tencent/", "tencent_cos_file.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/tencent_cos_file/"}, "OBSDirectoryLoader": {"Huawei": 
"https://python.langchain.com/v0.2/docs/integrations/providers/huawei/", "Install the required package": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/huawei_obs_directory/"}, "OBSFileLoader": {"Huawei": "https://python.langchain.com/v0.2/docs/integrations/providers/huawei/", "Install the required package": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/huawei_obs_file/"}, "DiffbotLoader": {"Diffbot": "https://python.langchain.com/v0.2/docs/integrations/providers/diffbot/", "diffbot.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/diffbot/"}, "DeepSparse": {"DeepSparse": "https://python.langchain.com/v0.2/docs/integrations/providers/.ipynb_checkpoints/deepsparse-checkpoint/", "deepsparse.md": "https://python.langchain.com/v0.2/docs/integrations/llms/deepsparse/"}, "AimCallbackHandler": {"scenario 1 - LLM": "https://python.langchain.com/v0.2/docs/integrations/providers/aim_tracking/"}, "ModernTreasuryLoader": {"Modern Treasury": "https://python.langchain.com/v0.2/docs/integrations/providers/modern_treasury/", "Create a vectorstore retriever from the loader": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/modern_treasury/"}, "GitHubIssuesLoader": {"GitHub": "https://python.langchain.com/v0.2/docs/integrations/providers/github/", "If you haven't set your access token as an environment variable, pass it in here.": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/github/"}, "GithubFileLoader": {"GitHub": "https://python.langchain.com/v0.2/docs/integrations/providers/github/", "If you haven't set your access token as an environment variable, pass it in here.": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/github/"}, "Banana": {"Banana": "https://python.langchain.com/v0.2/docs/integrations/providers/bananadev/", "Install the package https://docs.banana.dev/banana-docs/core-concepts/sdks/python": "https://python.langchain.com/v0.2/docs/integrations/llms/banana/"}, "InfinispanVS": {"Infinispan VS": "https://python.langchain.com/v0.2/docs/integrations/providers/infinispanvs/", "Ensure that all we need is installed": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/infinispanvs/"}, "CerebriumAI": {"CerebriumAI": "https://python.langchain.com/v0.2/docs/integrations/providers/cerebriumai/", "Install the package": "https://python.langchain.com/v0.2/docs/integrations/llms/cerebriumai/"}, "GutenbergLoader": {"Gutenberg": "https://python.langchain.com/v0.2/docs/integrations/providers/gutenberg/", "gutenberg.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/gutenberg/"}, "WikipediaLoader": {"Wikipedia": "https://python.langchain.com/v0.2/docs/integrations/providers/wikipedia/", "wikipedia.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/wikipedia/", "diffbot.md": "https://python.langchain.com/v0.2/docs/integrations/graphs/diffbot/"}, "ConfluenceLoader": {"Confluence": "https://python.langchain.com/v0.2/docs/integrations/providers/confluence/", "confluence.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/confluence/"}, "Predibase": {"Predibase": "https://python.langchain.com/v0.2/docs/integrations/providers/predibase/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/llms/predibase/"}, "Beam": {"Beam": "https://python.langchain.com/v0.2/docs/integrations/providers/beam/", "Set the environment variables": 
"https://python.langchain.com/v0.2/docs/integrations/llms/beam/"}, "GrobidParser": {"Grobid": "https://python.langchain.com/v0.2/docs/integrations/providers/grobid/", "grobid.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/grobid/"}, "GenericLoader": {"Grobid": "https://python.langchain.com/v0.2/docs/integrations/providers/grobid/", "set a flag to switch between local and remote parsing": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/youtube_audio/", "grobid.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/grobid/", "Code for: class MyClass:": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/source_code/", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/v0.2/docs/use_cases/code_understanding/", "Custom Document Loader {#custom-document-loader}": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/custom/"}, "Typesense": {"Typesense": "https://python.langchain.com/v0.2/docs/integrations/providers/typesense/", "typesense.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/typesense/"}, "Hologres": {"Hologres": "https://python.langchain.com/v0.2/docs/integrations/providers/hologres/", "Alibaba Cloud": "https://python.langchain.com/v0.2/docs/integrations/providers/alibaba_cloud/", "hologres.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/hologres/"}, "AI21": {"AI21 Labs": "https://python.langchain.com/v0.2/docs/integrations/providers/ai21/"}, "ArangoGraph": {"ArangoDB": "https://python.langchain.com/v0.2/docs/integrations/providers/arangodb/", "Instantiate ArangoDB Database": "https://python.langchain.com/v0.2/docs/integrations/graphs/arangodb/"}, "ArangoGraphQAChain": {"ArangoDB": "https://python.langchain.com/v0.2/docs/integrations/providers/arangodb/", "Instantiate ArangoDB Database": "https://python.langchain.com/v0.2/docs/integrations/graphs/arangodb/"}, "ArcGISLoader": {"ArcGIS": "https://python.langchain.com/v0.2/docs/integrations/providers/arcgis/", "arcgis.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/arcgis/"}, "WandbCallbackHandler": {"os.environ[\"OPENAI_API_KEY\"] = \"\"": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracking/"}, "ObsidianLoader": {"Obsidian": "https://python.langchain.com/v0.2/docs/integrations/providers/obsidian/", "obsidian.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/obsidian/"}, "create_sql_agent": {"CnosDB": "https://python.langchain.com/v0.2/docs/integrations/providers/cnosdb/", "sql_database.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/sql_database/", "Using LangSmith is recommended but not required. Uncomment below lines to use.": "https://python.langchain.com/v0.2/docs/use_cases/sql/csv/", "Uncomment the below to use LangSmith. 
Not required.": "https://python.langchain.com/v0.2/docs/use_cases/sql/agents/"}, "SQLDatabaseToolkit": {"CnosDB": "https://python.langchain.com/v0.2/docs/integrations/providers/cnosdb/", "sql_database.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/sql_database/"}, "Nebula": {"Nebula": "https://python.langchain.com/v0.2/docs/integrations/providers/symblai_nebula/", "symblai_nebula.md": "https://python.langchain.com/v0.2/docs/integrations/llms/symblai_nebula/"}, "Writer": {"Writer": "https://python.langchain.com/v0.2/docs/integrations/providers/writer/", "If you get an error, probably, you need to set up the \"base_url\" parameter that can be taken from the error log.": "https://python.langchain.com/v0.2/docs/integrations/llms/writer/"}, "BaichuanLLM": {"Baichuan": "https://python.langchain.com/v0.2/docs/integrations/providers/baichuan/", "Load the model": "https://python.langchain.com/v0.2/docs/integrations/llms/baichuan/"}, "ApacheDoris": {"Apache Doris": "https://python.langchain.com/v0.2/docs/integrations/providers/apache_doris/", "load text splitter and split docs into snippets of text": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/apache_doris/"}, "ZepVectorStore": {"Zep": "https://python.langchain.com/v0.2/docs/integrations/providers/zep/", "Collection config is needed if we're creating a new Zep Collection": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/zep/"}, "BrowserlessLoader": {"Browserless": "https://python.langchain.com/v0.2/docs/integrations/providers/browserless/", "browserless.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/browserless/"}, "AZLyricsLoader": {"AZLyrics": "https://python.langchain.com/v0.2/docs/integrations/providers/azlyrics/", "azlyrics.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/azlyrics/"}, "ToMarkdownLoader": {"2Markdown": "https://python.langchain.com/v0.2/docs/integrations/providers/tomarkdown/", "You will need to get your own API key. See https://2markdown.com/login": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/tomarkdown/"}, "Mlflow": {"MLflow Deployments for LLMs": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow/"}, "MlflowEmbeddings": {"MLflow Deployments for LLMs": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow/"}, "ChatMlflow": {"MLflow Deployments for LLMs": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow/"}, "GitLoader": {"Git": "https://python.langchain.com/v0.2/docs/integrations/providers/git/", "e.g. 
loading only python files": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/git/"}, "MlflowAIGateway": {"MLflow AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_ai_gateway/"}, "MlflowAIGatewayEmbeddings": {"MLflow AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_ai_gateway/"}, "ChatMLflowAIGateway": {"MLflow AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_ai_gateway/"}, "Tigris": {"Tigris": "https://python.langchain.com/v0.2/docs/integrations/providers/tigris/", "tigris.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tigris/"}, "Meilisearch": {"Meilisearch": "https://python.langchain.com/v0.2/docs/integrations/providers/meilisearch/", "Use Meilisearch vector store to store texts & associated embeddings as vector": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/meilisearch/"}, "SQLDatabaseChain": {"!pip3 install rebuff openai -U": "https://python.langchain.com/v0.2/docs/integrations/providers/rebuff/"}, "SnowflakeLoader": {"Snowflake": "https://python.langchain.com/v0.2/docs/integrations/providers/snowflake/", "snowflake.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/snowflake/"}, "CubeSemanticLoader": {"Cube": "https://python.langchain.com/v0.2/docs/integrations/providers/cube/", "Read more about security context here: https://cube.dev/docs/security": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/cube_semantic/"}, "Clickhouse": {"ClickHouse": "https://python.langchain.com/v0.2/docs/integrations/providers/clickhouse/", "clickhouse.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/clickhouse/"}, "ClickhouseSettings": {"ClickHouse": "https://python.langchain.com/v0.2/docs/integrations/providers/clickhouse/", "clickhouse.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/clickhouse/"}, "ChatDatabricks": {"-> content='Hello! How can I assist you today?'": "https://python.langchain.com/v0.2/docs/integrations/providers/databricks/", "If running a Databricks notebook attached to an interactive cluster in \"single user\"": "https://python.langchain.com/v0.2/docs/integrations/llms/databricks/"}, "DatabricksEmbeddings": {"-> content='Hello! How can I assist you today?'": "https://python.langchain.com/v0.2/docs/integrations/providers/databricks/", "If running a Databricks notebook attached to an interactive cluster in \"single user\"": "https://python.langchain.com/v0.2/docs/integrations/llms/databricks/"}, "TelegramChatFileLoader": {"Telegram": "https://python.langchain.com/v0.2/docs/integrations/providers/telegram/", "telegram.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/telegram/"}, "TelegramChatApiLoader": {"Telegram": "https://python.langchain.com/v0.2/docs/integrations/providers/telegram/", "telegram.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/telegram/"}, "PredictionGuard": {"Prediction Guard": "https://python.langchain.com/v0.2/docs/integrations/providers/predictionguard/", "Optional, add your OpenAI API Key. 
This is optional, as Prediction Guard allows": "https://python.langchain.com/v0.2/docs/integrations/llms/predictionguard/"}, "Together": {"together.md": "https://python.langchain.com/v0.2/docs/integrations/llms/together/"}, "NotionDirectoryLoader": {"Notion DB": "https://python.langchain.com/v0.2/docs/integrations/providers/notion/", "notion.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/notion/"}, "NotionDBLoader": {"Notion DB": "https://python.langchain.com/v0.2/docs/integrations/providers/notion/", "notiondb.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/notiondb/"}, "MWDumpLoader": {"MediaWikiDump": "https://python.langchain.com/v0.2/docs/integrations/providers/mediawikidump/", "mediawiki-utilities supports XML schema 0.11 in unmerged branches": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/mediawikidump/"}, "BraveSearchLoader": {"Brave Search": "https://python.langchain.com/v0.2/docs/integrations/providers/brave_search/", "brave_search.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/brave_search/"}, "StarRocks": {"StarRocks": "https://python.langchain.com/v0.2/docs/integrations/providers/starrocks/", "load text splitter and split docs into snippets of text": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/starrocks/"}, "GooseAI": {"GooseAI": "https://python.langchain.com/v0.2/docs/integrations/providers/gooseai/", "gooseai.md": "https://python.langchain.com/v0.2/docs/integrations/llms/gooseai/"}, "DatadogLogsLoader": {"Datadog Logs": "https://python.langchain.com/v0.2/docs/integrations/providers/datadog_logs/", "datadog_logs.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/datadog_logs/"}, "ApifyDatasetLoader": {"Apify": "https://python.langchain.com/v0.2/docs/integrations/providers/apify/", "apify_dataset.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/apify_dataset/"}, "NLPCloud": {"NLPCloud": "https://python.langchain.com/v0.2/docs/integrations/providers/nlpcloud/", "get a token: https://docs.nlpcloud.com/#authentication": "https://python.langchain.com/v0.2/docs/integrations/llms/nlpcloud/"}, "SemaDB": {"SemaDB": "https://python.langchain.com/v0.2/docs/integrations/providers/semadb/", "Create collection if running for the first time. 
If the collection": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/semadb/"}, "GitbookLoader": {"GitBook": "https://python.langchain.com/v0.2/docs/integrations/providers/gitbook/", "show second document": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/gitbook/"}, "VoyageAIRerank": {"VoyageAI": "https://python.langchain.com/v0.2/docs/integrations/providers/voyageai/", "OR (depending on Python version)": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/voyageai-reranker/"}, "Rockset": {"Rockset": "https://python.langchain.com/v0.2/docs/integrations/providers/rockset/", "output length: 4": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/rockset/"}, "RocksetLoader": {"Rockset": "https://python.langchain.com/v0.2/docs/integrations/providers/rockset/", "Loading Documents {#loading-documents}": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/rockset/"}, "Minimax": {"Minimax": "https://python.langchain.com/v0.2/docs/integrations/providers/minimax/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/llms/minimax/"}, "UnstructuredAPIFileIOLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/"}, "UnstructuredAPIFileLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "# Install package": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/unstructured_file/"}, "UnstructuredCHMLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/"}, "UnstructuredCSVLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "csv.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/csv/"}, "UnstructuredEmailLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "email.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/email/"}, "UnstructuredEPubLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "epub.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/epub/"}, "UnstructuredFileIOLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "folder_id='1yucgL9WGgWZdM1TOuKkeghlPizuzMYb5'": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_drive/"}, "UnstructuredFileLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "# Install package": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/unstructured_file/"}, "UnstructuredHTMLLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "HTML": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/html/"}, "UnstructuredImageLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "image.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/image/"}, "UnstructuredMarkdownLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "load text splitter and split docs into snippets of text": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/starrocks/", "Markdown": 
"https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/markdown/"}, "UnstructuredODTLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "odt.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/odt/"}, "UnstructuredOrgModeLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "org_mode.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/org_mode/"}, "UnstructuredPDFLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "PDF": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/pdf/"}, "UnstructuredRSTLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "rst.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/rst/"}, "UnstructuredRTFLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/"}, "UnstructuredTSVLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "tsv.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/tsv/"}, "UnstructuredURLLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "url.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/url/"}, "UnstructuredXMLLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "xml.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/xml/"}, "SelfHostedPipeline": {"Runhouse": "https://python.langchain.com/v0.2/docs/integrations/providers/runhouse/", "For an on-demand A100 with GCP, Azure, or Lambda": "https://python.langchain.com/v0.2/docs/integrations/llms/runhouse/"}, "SelfHostedHuggingFaceLLM": {"Runhouse": "https://python.langchain.com/v0.2/docs/integrations/providers/runhouse/", "For an on-demand A100 with GCP, Azure, or Lambda": "https://python.langchain.com/v0.2/docs/integrations/llms/runhouse/"}, "MlflowCallbackHandler": {"SCENARIO 1 - LLM": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_tracking/"}, "AstraDBVectorStore": {"Astra DB": "https://python.langchain.com/v0.2/docs/integrations/providers/astradb/", "astradb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/astradb/"}, "AstraDBCache": {"Astra DB": "https://python.langchain.com/v0.2/docs/integrations/providers/astradb/", "To make the caching really obvious, lets use a slower model.": "https://python.langchain.com/v0.2/docs/integrations/llms/llm_caching/"}, "AstraDBSemanticCache": {"Astra DB": "https://python.langchain.com/v0.2/docs/integrations/providers/astradb/", "To make the caching really obvious, lets use a slower model.": "https://python.langchain.com/v0.2/docs/integrations/llms/llm_caching/"}, "AstraDBLoader": {"Astra DB": "https://python.langchain.com/v0.2/docs/integrations/providers/astradb/", "astradb.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/astradb/"}, "AstraDBStore": {"Astra DB": "https://python.langchain.com/v0.2/docs/integrations/providers/astradb/", "astradb.md": "https://python.langchain.com/v0.2/docs/integrations/stores/astradb/"}, "AstraDBByteStore": {"Astra DB": "https://python.langchain.com/v0.2/docs/integrations/providers/astradb/", "astradb.md": 
"https://python.langchain.com/v0.2/docs/integrations/stores/astradb/"}, "SpreedlyLoader": {"Spreedly": "https://python.langchain.com/v0.2/docs/integrations/providers/spreedly/", "Create a vectorstore retriever from the loader": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/spreedly/"}, "OpenLLM": {"OpenLLM": "https://python.langchain.com/v0.2/docs/integrations/providers/openllm/", "openllm.md": "https://python.langchain.com/v0.2/docs/integrations/llms/openllm/"}, "PubMedLoader": {"PubMed": "https://python.langchain.com/v0.2/docs/integrations/providers/pubmed/", "pubmed.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/pubmed/"}, "SearxSearchResults": {"SearxNG Search API": "https://python.langchain.com/v0.2/docs/integrations/providers/searx/"}, "ActionServerToolkit": {"Robocorp": "https://python.langchain.com/v0.2/docs/integrations/providers/robocorp/", "Install package": "https://python.langchain.com/v0.2/docs/integrations/toolkits/robocorp/"}, "SpacyTextSplitter": {"spaCy": "https://python.langchain.com/v0.2/docs/integrations/providers/spacy/", "atlas.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/atlas/", "This is a long document we can split up.": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_transformers/split_by_token/"}, "Modal": {"Modal": "https://python.langchain.com/v0.2/docs/integrations/providers/modal/", "Register an account with Modal and get a new token.": "https://python.langchain.com/v0.2/docs/integrations/llms/modal/"}, "OpenCityDataLoader": {"Geopandas": "https://python.langchain.com/v0.2/docs/integrations/providers/geopandas/", "Load Open City Data": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/geopandas/", "open_city_data.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/open_city_data/"}, "PGEmbedding": {"Postgres Embedding": "https://python.langchain.com/v0.2/docs/integrations/providers/pg_embedding/", "Pip install necessary package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pgembedding/"}, "SQLiteVSS": {"SQLite": "https://python.langchain.com/v0.2/docs/integrations/providers/sqlite/", "You need to install sqlite-vss as a dependency.": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sqlitevss/"}, "Xinference": {"Xorbits Inference (Xinference)": "https://python.langchain.com/v0.2/docs/integrations/providers/xinference/", "xinference.md": "https://python.langchain.com/v0.2/docs/integrations/llms/xinference/"}, "IFixitLoader": {"iFixit": "https://python.langchain.com/v0.2/docs/integrations/providers/ifixit/", "ifixit.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/ifixit/"}, "AlephAlpha": {"Aleph Alpha": "https://python.langchain.com/v0.2/docs/integrations/providers/aleph_alpha/", "Install the package": "https://python.langchain.com/v0.2/docs/integrations/llms/aleph_alpha/"}, "PipelineAI": {"PipelineAI": "https://python.langchain.com/v0.2/docs/integrations/providers/pipelineai/", "Install the package": "https://python.langchain.com/v0.2/docs/integrations/llms/pipelineai/"}, "FacebookChatLoader": {"Facebook - Meta": "https://python.langchain.com/v0.2/docs/integrations/providers/facebook/", "pip install pandas": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/facebook_chat/"}, "Epsilla": {"Epsilla": "https://python.langchain.com/v0.2/docs/integrations/providers/epsilla/", "epsilla.md": 
"https://python.langchain.com/v0.2/docs/integrations/vectorstores/epsilla/"}, "AwaDB": {"AwaDB": "https://python.langchain.com/v0.2/docs/integrations/providers/awadb/", "awadb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/awadb/"}, "ArxivLoader": {"Arxiv": "https://python.langchain.com/v0.2/docs/integrations/providers/arxiv/", "arxiv.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/arxiv/"}, "BlockchainDocumentLoader": {"Alchemy": "https://python.langchain.com/v0.2/docs/integrations/providers/alchemy/", "get ALCHEMY_API_KEY from https://www.alchemy.com/": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/blockchain/"}, "BlockchainType": {"Alchemy": "https://python.langchain.com/v0.2/docs/integrations/providers/alchemy/", "get ALCHEMY_API_KEY from https://www.alchemy.com/": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/blockchain/"}, "Anyscale": {"Anyscale": "https://python.langchain.com/v0.2/docs/integrations/providers/anyscale/", "anyscale.md": "https://python.langchain.com/v0.2/docs/integrations/llms/anyscale/"}, "AINetworkToolkit": {"AINetwork": "https://python.langchain.com/v0.2/docs/integrations/providers/ainetwork/", "IMPORTANT: If you plan to use this account in the future, make sure to save the": "https://python.langchain.com/v0.2/docs/integrations/toolkits/ainetwork/"}, "StripeLoader": {"Stripe": "https://python.langchain.com/v0.2/docs/integrations/providers/stripe/", "Create a vectorstore retriever from the loader": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/stripe/"}, "StochasticAI": {"StochasticAI": "https://python.langchain.com/v0.2/docs/integrations/providers/stochasticai/", "stochasticai.md": "https://python.langchain.com/v0.2/docs/integrations/llms/stochasticai/"}, "Bagel": {"BagelDB": "https://python.langchain.com/v0.2/docs/integrations/providers/bageldb/", "create cluster and add texts": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/bageldb/"}, "TigerGraph": {"TigerGraph": "https://python.langchain.com/v0.2/docs/integrations/providers/tigergraph/"}, "BlackboardLoader": {"Blackboard": "https://python.langchain.com/v0.2/docs/integrations/providers/blackboard/", "blackboard.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/blackboard/"}, "YandexGPT": {"Yandex": "https://python.langchain.com/v0.2/docs/integrations/providers/yandex/", "yandex.md": "https://python.langchain.com/v0.2/docs/integrations/llms/yandex/"}, "LanceDB": {"LanceDB": "https://python.langchain.com/v0.2/docs/integrations/providers/lancedb/", "lancedb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/lancedb/", "Vector stores": "https://python.langchain.com/v0.2/docs/modules/data_connection/vectorstores/index/"}, "UpstashRedisCache": {"Upstash Redis": "https://python.langchain.com/v0.2/docs/integrations/providers/upstash/", "To make the caching really obvious, lets use a slower model.": "https://python.langchain.com/v0.2/docs/integrations/llms/llm_caching/"}, "NucliaTextTransformer": {"Nuclia": "https://python.langchain.com/v0.2/docs/integrations/providers/nuclia/", "nuclia_transformer.md": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/nuclia_transformer/"}, "AnalyticDB": {"AnalyticDB": "https://python.langchain.com/v0.2/docs/integrations/providers/analyticdb/", "Alibaba Cloud": "https://python.langchain.com/v0.2/docs/integrations/providers/alibaba_cloud/", "analyticdb.md": 
"https://python.langchain.com/v0.2/docs/integrations/vectorstores/analyticdb/"}, "GoogleApiYoutubeLoader": {"YouTube": "https://python.langchain.com/v0.2/docs/integrations/providers/youtube/", "Init the GoogleApiClient": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/youtube_transcript/"}, "PromptLayerOpenAI": {"PromptLayer": "https://python.langchain.com/v0.2/docs/integrations/providers/promptlayer/", "promptlayer_openai.md": "https://python.langchain.com/v0.2/docs/integrations/llms/promptlayer_openai/"}, "USearch": {"USearch": "https://python.langchain.com/v0.2/docs/integrations/providers/usearch/", "usearch.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/usearch/"}, "EtherscanLoader": {"Etherscan": "https://python.langchain.com/v0.2/docs/integrations/providers/etherscan/", "etherscan.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/etherscan/"}, "Arcee": {"Arcee": "https://python.langchain.com/v0.2/docs/integrations/providers/arcee/", "Create an instance of the Arcee class": "https://python.langchain.com/v0.2/docs/integrations/llms/arcee/"}, "WhyLabsCallbackHandler": {"you don't need to call close to write profiles to WhyLabs, upload will occur periodically, but to demo let's not wait.": "https://python.langchain.com/v0.2/docs/integrations/providers/whylabs_profiling/"}, "IuguLoader": {"Iugu": "https://python.langchain.com/v0.2/docs/integrations/providers/iugu/", "Create a vectorstore retriever from the loader": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/iugu/"}, "CouchbaseLoader": {"Couchbase": "https://python.langchain.com/v0.2/docs/integrations/providers/couchbase/", "query is a valid SQL++ query": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/couchbase/"}, "FlyteCallbackHandler": {"Flyte": "https://python.langchain.com/v0.2/docs/integrations/providers/flyte/"}, "wandb_tracing_enabled": {"wandb documentation to configure wandb using env variables": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracing/"}, "ManifestWrapper": {"Hazy Research": "https://python.langchain.com/v0.2/docs/integrations/providers/hazy_research/", "Map reduce example": "https://python.langchain.com/v0.2/docs/integrations/llms/manifest/"}, "OntotextGraphDBGraph": {"Ontotext GraphDB": "https://python.langchain.com/v0.2/docs/integrations/providers/ontotext_graphdb/", "feeding the schema using a user construct query": "https://python.langchain.com/v0.2/docs/integrations/graphs/ontotext/"}, "OntotextGraphDBQAChain": {"Ontotext GraphDB": "https://python.langchain.com/v0.2/docs/integrations/providers/ontotext_graphdb/", "feeding the schema using a user construct query": "https://python.langchain.com/v0.2/docs/integrations/graphs/ontotext/"}, "Marqo": {"Marqo": "https://python.langchain.com/v0.2/docs/integrations/providers/marqo/", "initialize marqo": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/marqo/"}, "IMSDbLoader": {"IMSDb": "https://python.langchain.com/v0.2/docs/integrations/providers/imsdb/", "imsdb.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/imsdb/"}, "TiDBLoader": {"TiDB": "https://python.langchain.com/v0.2/docs/integrations/providers/tidb/", "copy from tidb cloud console\uff0creplace it with your own": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/tidb/"}, "TiDBVectorStore": {"TiDB": "https://python.langchain.com/v0.2/docs/integrations/providers/tidb/", "Here we useimport getpass": 
"https://python.langchain.com/v0.2/docs/integrations/vectorstores/tidb_vector/"}, "DeepInfra": {"DeepInfra": "https://python.langchain.com/v0.2/docs/integrations/providers/deepinfra/", "get a new token: https://deepinfra.com/login?from=%2Fdash": "https://python.langchain.com/v0.2/docs/integrations/llms/deepinfra/"}, "RedditPostsLoader": {"Reddit": "https://python.langchain.com/v0.2/docs/integrations/providers/reddit/", "load using 'subreddit' mode": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/reddit/"}, "TrelloLoader": {"Trello": "https://python.langchain.com/v0.2/docs/integrations/providers/trello/", "If you have already set the API key and token using environment variables,": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/trello/"}, "AtlasDB": {"Atlas": "https://python.langchain.com/v0.2/docs/integrations/providers/atlas/", "atlas.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/atlas/"}, "SKLearnVectorStore": {"scikit-learn": "https://python.langchain.com/v0.2/docs/integrations/providers/sklearn/", "# if you plan to use bson serialization, install also:": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sklearn/"}, "EverNoteLoader": {"EverNote": "https://python.langchain.com/v0.2/docs/integrations/providers/evernote/", "lxml and html2text are required to parse EverNote notes": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/evernote/"}, "VDMS": {"VDMS": "https://python.langchain.com/v0.2/docs/integrations/providers/vdms/", "Pip install necessary package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vdms/"}, "VDMS_Client": {"VDMS": "https://python.langchain.com/v0.2/docs/integrations/providers/vdms/", "Pip install necessary package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vdms/"}, "TwitterTweetLoader": {"Twitter": "https://python.langchain.com/v0.2/docs/integrations/providers/twitter/", "Or load from access token and consumer keys": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/twitter/"}, "DiscordChatLoader": {"Discord": "https://python.langchain.com/v0.2/docs/integrations/providers/discord/", "discord.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/discord/"}, "AssemblyAIAudioTranscriptLoader": {"AssemblyAI": "https://python.langchain.com/v0.2/docs/integrations/providers/assemblyai/", "or a local file path: audio_file = \"./nbc.mp3\"": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/assemblyai/"}, "RedisCache": {"Redis": "https://python.langchain.com/v0.2/docs/integrations/providers/redis/", "To make the caching really obvious, lets use a slower model.": "https://python.langchain.com/v0.2/docs/integrations/llms/llm_caching/"}, "RedisSemanticCache": {"Redis": "https://python.langchain.com/v0.2/docs/integrations/providers/redis/", "To make the caching really obvious, lets use a slower model.": "https://python.langchain.com/v0.2/docs/integrations/llms/llm_caching/"}, "Kinetica": {"Kinetica": "https://python.langchain.com/v0.2/docs/integrations/providers/kinetica/", "Pip install necessary package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/kinetica/"}, "ClearMLCallbackHandler": {"Setup and use the ClearML Callback": "https://python.langchain.com/v0.2/docs/integrations/providers/clearml_tracking/"}, "create_cohere_react_agent": {"Cohere": "https://python.langchain.com/v0.2/docs/integrations/providers/cohere/"}, "SlackDirectoryLoader": 
{"Slack": "https://python.langchain.com/v0.2/docs/integrations/providers/slack/", "Optionally set your Slack URL. This will give you proper URLs in the docs sources.": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/slack/"}, "Ollama": {"Ollama": "https://python.langchain.com/v0.2/docs/integrations/providers/ollama/", "ollama.md": "https://python.langchain.com/v0.2/docs/integrations/llms/ollama/", "Quickstart": "https://python.langchain.com/v0.2/docs/modules/model_io/.ipynb_checkpoints/quick_start-checkpoint/", "Download a llamafile from HuggingFace": "https://python.langchain.com/v0.2/docs/guides/development/local_llms/", "Model I/O": "https://python.langchain.com/v0.2/docs/modules/model_io/index/"}, "HNLoader": {"Hacker News": "https://python.langchain.com/v0.2/docs/integrations/providers/hacker_news/", "# Automatically restart kernel after installs so that your environment can access the new packages": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_spanner/", "hacker_news.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/hacker_news/"}, "CTranslate2": {"CTranslate2": "https://python.langchain.com/v0.2/docs/integrations/providers/ctranslate2/", "conversation can take several minutes": "https://python.langchain.com/v0.2/docs/integrations/llms/ctranslate2/"}, "QianfanLLMEndpoint": {"Baidu": "https://python.langchain.com/v0.2/docs/integrations/providers/baidu/", "baidu_qianfan_endpoint.md": "https://python.langchain.com/v0.2/docs/integrations/llms/baidu_qianfan_endpoint/"}, "BESVectorStore": {"Baidu": "https://python.langchain.com/v0.2/docs/integrations/providers/baidu/", "Create a bes instance and index docs.": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/baiducloud_vector_search/"}, "Aphrodite": {"PygmalionAI": "https://python.langchain.com/v0.2/docs/integrations/providers/pygmalionai/", "%pip list | grep aphrodite": "https://python.langchain.com/v0.2/docs/integrations/llms/aphrodite/"}, "PaiEasEndpoint": {"Alibaba Cloud": "https://python.langchain.com/v0.2/docs/integrations/providers/alibaba_cloud/", "alibabacloud_pai_eas_endpoint.md": "https://python.langchain.com/v0.2/docs/integrations/llms/alibabacloud_pai_eas_endpoint/"}, "MaxComputeLoader": {"Alibaba Cloud": "https://python.langchain.com/v0.2/docs/integrations/providers/alibaba_cloud/", "alibaba_cloud_maxcompute.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/alibaba_cloud_maxcompute/"}, "AlibabaCloudOpenSearch": {"Alibaba Cloud": "https://python.langchain.com/v0.2/docs/integrations/providers/alibaba_cloud/", "for example": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/alibabacloud_opensearch/"}, "AlibabaCloudOpenSearchSettings": {"Alibaba Cloud": "https://python.langchain.com/v0.2/docs/integrations/providers/alibaba_cloud/", "for example": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/alibabacloud_opensearch/"}, "DocusaurusLoader": {"Docusaurus": "https://python.langchain.com/v0.2/docs/integrations/providers/docusaurus/", "fixes a bug with asyncio and jupyter": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/docusaurus/"}, "Annoy": {"Annoy": "https://python.langchain.com/v0.2/docs/integrations/providers/annoy/", "default metric is angular": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/annoy/"}, "BibtexLoader": {"BibTeX": "https://python.langchain.com/v0.2/docs/integrations/providers/bibtex/", "Create a dummy bibtex file and 
download a pdf.": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/bibtex/"}, "Cassandra": {"Cassandra": "https://python.langchain.com/v0.2/docs/integrations/providers/cassandra/", "cassandra.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/cassandra/"}, "CassandraCache": {"Cassandra": "https://python.langchain.com/v0.2/docs/integrations/providers/cassandra/", "To make the caching really obvious, lets use a slower model.": "https://python.langchain.com/v0.2/docs/integrations/llms/llm_caching/"}, "CassandraSemanticCache": {"Cassandra": "https://python.langchain.com/v0.2/docs/integrations/providers/cassandra/", "To make the caching really obvious, lets use a slower model.": "https://python.langchain.com/v0.2/docs/integrations/llms/llm_caching/"}, "CassandraLoader": {"Cassandra": "https://python.langchain.com/v0.2/docs/integrations/providers/cassandra/", "cassandra.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/cassandra/"}, "Vearch": {"Vearch": "https://python.langchain.com/v0.2/docs/integrations/providers/vearch/", "OR": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vearch/"}, "JoplinLoader": {"Joplin": "https://python.langchain.com/v0.2/docs/integrations/providers/joplin/", "joplin.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/joplin/"}, "ArthurCallbackHandler": {"arthur_tracking.md": "https://python.langchain.com/v0.2/docs/integrations/providers/arthur_tracking/"}, "AcreomLoader": {"Acreom": "https://python.langchain.com/v0.2/docs/integrations/providers/acreom/", "acreom.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/acreom/"}, "KDBAI": {"KDB.AI": "https://python.langchain.com/v0.2/docs/integrations/providers/kdbai/", "Clean up KDB.AI \"documents\" table and index for similarity search": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/kdbai/"}, "DuckDBLoader": {"DuckDB": "https://python.langchain.com/v0.2/docs/integrations/providers/duckdb/", "duckdb.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/duckdb/"}, "Petals": {"Petals": "https://python.langchain.com/v0.2/docs/integrations/providers/petals/", "this can take several minutes to download big files!": "https://python.langchain.com/v0.2/docs/integrations/llms/petals/"}, "MomentoCache": {"Momento": "https://python.langchain.com/v0.2/docs/integrations/providers/momento/", "To make the caching really obvious, lets use a slower model.": "https://python.langchain.com/v0.2/docs/integrations/llms/llm_caching/"}, "MomentoVectorIndex": {"Momento": "https://python.langchain.com/v0.2/docs/integrations/providers/momento/", "Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/momento_vector_index/"}, "NIBittensorLLM": {"Bittensor": "https://python.langchain.com/v0.2/docs/integrations/providers/bittensor/", "System parameter in NIBittensorLLM is optional but you can set whatever you want to perform with model": "https://python.langchain.com/v0.2/docs/integrations/llms/bittensor/"}, "Neo4jVector": {"Neo4j": "https://python.langchain.com/v0.2/docs/integrations/providers/neo4j/", "Pip install necessary package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/neo4jvector/", "Uncomment the below to use LangSmith. 
Not required.": "https://python.langchain.com/v0.2/docs/use_cases/graph/prompting/"}, "Neo4jGraph": {"Neo4j": "https://python.langchain.com/v0.2/docs/integrations/providers/neo4j/", "diffbot.md": "https://python.langchain.com/v0.2/docs/integrations/graphs/diffbot/", "How many people played in Top Gun?": "https://python.langchain.com/v0.2/docs/integrations/graphs/neo4j_cypher/", "Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/graph/prompting/"}, "GraphCypherQAChain": {"Neo4j": "https://python.langchain.com/v0.2/docs/integrations/providers/neo4j/", "Creating and executing the seeding query": "https://python.langchain.com/v0.2/docs/integrations/graphs/memgraph/", "diffbot.md": "https://python.langchain.com/v0.2/docs/integrations/graphs/diffbot/", "How many people played in Top Gun?": "https://python.langchain.com/v0.2/docs/integrations/graphs/neo4j_cypher/", "Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/graph/prompting/"}, "DiffbotGraphTransformer": {"Neo4j": "https://python.langchain.com/v0.2/docs/integrations/providers/neo4j/", "diffbot.md": "https://python.langchain.com/v0.2/docs/integrations/graphs/diffbot/"}, "AirtableLoader": {"Airtable": "https://python.langchain.com/v0.2/docs/integrations/providers/airtable/", "airtable.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airtable/"}, "LarkSuiteDocLoader": {"ByteDance": "https://python.langchain.com/v0.2/docs/integrations/providers/byte_dance/", "see https://python.langchain.com/v0.2/docs/use_cases/summarization for more details": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/larksuite/"}, "JavelinAIGateway": {"Javelin AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/javelin_ai_gateway/", "Step 4: Embeddings Example {#step-4-embeddings-example}": "https://python.langchain.com/v0.2/docs/integrations/llms/javelin/"}, "JavelinAIGatewayEmbeddings": {"Javelin AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/javelin_ai_gateway/", "Step 4: Embeddings Example {#step-4-embeddings-example}": "https://python.langchain.com/v0.2/docs/integrations/llms/javelin/"}, "ChatJavelinAIGateway": {"Javelin AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/javelin_ai_gateway/", "Step 4: Embeddings Example {#step-4-embeddings-example}": "https://python.langchain.com/v0.2/docs/integrations/llms/javelin/"}, "TensorflowDatasetLoader": {"TensorFlow Datasets": "https://python.langchain.com/v0.2/docs/integrations/providers/tensorflow_datasets/", "Feature structure of `mlqa/en` dataset:": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/tensorflow_datasets/"}, "Clarifai": {"Clarifai": "https://python.langchain.com/v0.2/docs/integrations/providers/clarifai/", "Dependencies {#dependencies}": "https://python.langchain.com/v0.2/docs/integrations/llms/clarifai/"}, "DataheraldTextToSQL": {"Dataherald": "https://python.langchain.com/v0.2/docs/integrations/providers/.ipynb_checkpoints/dataherald-checkpoint/"}, "RoamLoader": {"Roam": "https://python.langchain.com/v0.2/docs/integrations/providers/roam/", "roam.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/roam/"}, "create_openai_tools_agent": {"Construct the OpenAI Tools agent": "https://python.langchain.com/v0.2/docs/integrations/providers/portkey/logging_tracing_portkey/", "Portkey": 
"https://python.langchain.com/v0.2/docs/integrations/providers/portkey/index/", "sql_database.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/sql_database/", "Set environmental variables here": "https://python.langchain.com/v0.2/docs/integrations/toolkits/slack/", "conversational_retrieval_agents.md": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/conversational_retrieval_agents/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/chatbots/tool_usage/", "Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/streaming/"}, "CONDENSE_QUESTION_PROMPT": {"Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_chat/"}, "load_qa_with_sources_chain": {"Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_chat/"}, "QA_PROMPT": {"Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_chat/"}, "Chroma": {"Chroma": "https://python.langchain.com/v0.2/docs/integrations/providers/.ipynb_checkpoints/chroma-checkpoint/", "You need the dgml-utils package to use the DocugamiLoader (run pip install directly without \"poetry run\" if you are not using poetry)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/docugami/", "Retrievers": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/.ipynb_checkpoints/index-checkpoint/"}, "RedisStore": {"redis.md": "https://python.langchain.com/v0.2/docs/integrations/stores/redis/"}, "InMemoryByteStore": {"in_memory.md": "https://python.langchain.com/v0.2/docs/integrations/stores/in_memory/", "Swapping the `ByteStore` {#swapping-the-bytestore}": "https://python.langchain.com/v0.2/docs/modules/data_connection/text_embedding/caching_embeddings/", "The vectorstore to use to index the child chunks": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/multi_vector/"}, "LocalFileStore": {"file_system.md": "https://python.langchain.com/v0.2/docs/integrations/stores/file_system/", "Swapping the `ByteStore` {#swapping-the-bytestore}": "https://python.langchain.com/v0.2/docs/modules/data_connection/text_embedding/caching_embeddings/"}, "CacheBackedEmbeddings": {"astradb.md": "https://python.langchain.com/v0.2/docs/integrations/stores/astradb/", "Swapping the `ByteStore` {#swapping-the-bytestore}": "https://python.langchain.com/v0.2/docs/modules/data_connection/text_embedding/caching_embeddings/"}, "UpstashRedisByteStore": {"upstash_redis.md": "https://python.langchain.com/v0.2/docs/integrations/stores/upstash_redis/"}, "ConneryToolkit": {"Specify your Connery Runner credentials.": "https://python.langchain.com/v0.2/docs/integrations/toolkits/connery/"}, "create_csv_agent": {"Create a dataframe": "https://python.langchain.com/v0.2/docs/integrations/toolkits/csv/"}, "create_xorbits_agent": {"xorbits.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/xorbits/"}, "JiraToolkit": {"jira.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/jira/"}, "JiraAPIWrapper": {"jira.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/jira/"}, "create_spark_dataframe_agent": {"in apache-spark root directory. 
(tested here with \"spark-3.4.0-bin-hadoop3 and later\")": "https://python.langchain.com/v0.2/docs/integrations/toolkits/spark/"}, "PyPDFLoader": {"document_comparison_toolkit.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/document_comparison_toolkit/", "TODO : Set values as per your requirements": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_vertex_ai_vector_search/", "astradb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/astradb/", "cassandra.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/cassandra/", "Clean up KDB.AI \"documents\" table and index for similarity search": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/kdbai/", "initialize MongoDB python client": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/mongodb_atlas/", "merge_doc.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/merge_doc/", "google_cloud_storage_file.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_cloud_storage_file/", "PDF": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/pdf/"}, "set_debug": {"document_comparison_toolkit.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/document_comparison_toolkit/", "System parameter in NIBittensorLLM is optional but you can set whatever you want to perform with model": "https://python.langchain.com/v0.2/docs/integrations/llms/bittensor/", "textgen.md": "https://python.langchain.com/v0.2/docs/integrations/llms/textgen/", "install the opaqueprompts and langchain packages": "https://python.langchain.com/v0.2/docs/integrations/llms/opaqueprompts/", "Debugging": "https://python.langchain.com/v0.2/docs/guides/development/debugging/"}, "PythonREPLTool": {"Define the neural network": "https://python.langchain.com/v0.2/docs/integrations/toolkits/python/"}, "create_pbi_agent": {"fictional example": "https://python.langchain.com/v0.2/docs/integrations/toolkits/powerbi/"}, "AzureCognitiveServicesToolkit": {"For Windows/Linux": "https://python.langchain.com/v0.2/docs/integrations/toolkits/azure_cognitive_services/"}, "Requests": {"Select the LLM to use. Here, we use gpt-3.5-turbo-instruct": "https://python.langchain.com/v0.2/docs/integrations/toolkits/openapi_nla/"}, "NLAToolkit": {"Select the LLM to use. Here, we use gpt-3.5-turbo-instruct": "https://python.langchain.com/v0.2/docs/integrations/toolkits/openapi_nla/"}, "build_resource_service": {"os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/integrations/toolkits/gmail/"}, "get_gmail_credentials": {"os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/integrations/toolkits/gmail/"}, "SlackToolkit": {"Set environmental variables here": "https://python.langchain.com/v0.2/docs/integrations/toolkits/slack/"}, "SteamToolkit": {"steam.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/steam/"}, "SteamWebAPIWrapper": {"steam.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/steam/"}, "create_json_agent": {"json.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/json/"}, "JsonToolkit": {"json.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/json/"}, "JsonSpec": {"json.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/json/", "NOTE: In this example. 
We must set `allow_dangerous_request=True` to enable the OpenAPI Agent to automatically use the Request Tool.": "https://python.langchain.com/v0.2/docs/integrations/toolkits/openapi/"}, "AirbyteStripeLoader": {"airbyte_structured_qa.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/airbyte_structured_qa/", "airbyte_stripe.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_stripe/"}, "create_pandas_dataframe_agent": {"airbyte_structured_qa.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/airbyte_structured_qa/", "pandas.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/pandas/", "Using LangSmith is recommended but not required. Uncomment below lines to use.": "https://python.langchain.com/v0.2/docs/use_cases/sql/csv/"}, "GitHubToolkit": {"Set your environment variables using os.environ": "https://python.langchain.com/v0.2/docs/integrations/toolkits/github/"}, "GitHubAPIWrapper": {"Set your environment variables using os.environ": "https://python.langchain.com/v0.2/docs/integrations/toolkits/github/"}, "ConversationSummaryBufferMemory": {"Set your environment variables using os.environ": "https://python.langchain.com/v0.2/docs/integrations/toolkits/github/", "We can see here that there is a summary of the conversation and then some previous interactions": "https://python.langchain.com/v0.2/docs/modules/memory/types/summary_buffer/"}, "render_text_description_and_args": {"Set your environment variables using os.environ": "https://python.langchain.com/v0.2/docs/integrations/toolkits/github/", "Set environmental variables here": "https://python.langchain.com/v0.2/docs/integrations/toolkits/amadeus/"}, "ClickupToolkit": {"Copilot Sandbox": "https://python.langchain.com/v0.2/docs/integrations/toolkits/clickup/"}, "ClickupAPIWrapper": {"Copilot Sandbox": "https://python.langchain.com/v0.2/docs/integrations/toolkits/clickup/"}, "create_spark_sql_agent": {"Note, you can also connect to Spark via Spark connect. For example:": "https://python.langchain.com/v0.2/docs/integrations/toolkits/spark_sql/"}, "SparkSQLToolkit": {"Note, you can also connect to Spark via Spark connect. For example:": "https://python.langchain.com/v0.2/docs/integrations/toolkits/spark_sql/"}, "SparkSQL": {"Note, you can also connect to Spark via Spark connect. 
For example:": "https://python.langchain.com/v0.2/docs/integrations/toolkits/spark_sql/"}, "PlayWrightBrowserToolkit": {"If this is your first time using playwright, you'll have to install a browser executable.": "https://python.langchain.com/v0.2/docs/integrations/toolkits/playwright/"}, "create_async_playwright_browser": {"If this is your first time using playwright, you'll have to install a browser executable.": "https://python.langchain.com/v0.2/docs/integrations/toolkits/playwright/"}, "create_conversational_retrieval_agent": {"cogniswitch.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/cogniswitch/"}, "CogniswitchToolkit": {"cogniswitch.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/cogniswitch/"}, "NasaToolkit": {"nasa.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/nasa/"}, "NasaAPIWrapper": {"nasa.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/nasa/"}, "MultionToolkit": {"Authorize connection to your Browser extention": "https://python.langchain.com/v0.2/docs/integrations/toolkits/multion/"}, "AmadeusToolkit": {"Set environmental variables here": "https://python.langchain.com/v0.2/docs/integrations/toolkits/amadeus/"}, "AzureAiServicesToolkit": {"azure_ai_services.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/azure_ai_services/"}, "create_structured_chat_agent": {"azure_ai_services.md": "https://python.langchain.com/v0.2/docs/integrations/toolkits/azure_ai_services/", "Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/agent_types/structured_chat/"}, "reduce_openapi_spec": {"NOTE: In this example. We must set `allow_dangerous_request=True` to enable the OpenAPI Agent to automatically use the Request Tool.": "https://python.langchain.com/v0.2/docs/integrations/toolkits/openapi/"}, "RequestsWrapper": {"NOTE: In this example. We must set `allow_dangerous_request=True` to enable the OpenAPI Agent to automatically use the Request Tool.": "https://python.langchain.com/v0.2/docs/integrations/toolkits/openapi/"}, "create_openapi_agent": {"NOTE: In this example. We must set `allow_dangerous_request=True` to enable the OpenAPI Agent to automatically use the Request Tool.": "https://python.langchain.com/v0.2/docs/integrations/toolkits/openapi/"}, "OpenAPIToolkit": {"NOTE: In this example. We must set `allow_dangerous_request=True` to enable the OpenAPI Agent to automatically use the Request Tool.": "https://python.langchain.com/v0.2/docs/integrations/toolkits/openapi/"}, "GitLabToolkit": {"Set your environment variables using os.environ": "https://python.langchain.com/v0.2/docs/integrations/toolkits/gitlab/"}, "GitLabAPIWrapper": {"Set your environment variables using os.environ": "https://python.langchain.com/v0.2/docs/integrations/toolkits/gitlab/"}, "PolygonToolkit": {"os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/integrations/toolkits/polygon/"}, "ApacheDorisSettings": {"load text splitter and split docs into snippets of text": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/apache_doris/"}, "DistanceStrategy": {"Pip install necessary package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sap_hanavector/", "# Automatically restart kernel after installs so that your environment can access the new packages": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_bigquery_vector_search/", "Create collection if running for the first time. 
If the collection": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/semadb/"}, "KineticaSettings": {"Pip install necessary package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/kinetica/"}, "SentenceTransformerEmbeddings": {"You need to install sqlite-vss as a dependency.": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sqlitevss/", "docs[0].metadata[\"id\"] == \"id:testapp:testapp::32\"": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vespa/", "import": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/chroma/"}, "Vald": {"Refresh is required for server use": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vald/"}, "RetrievalQAWithSourcesChain": {"install package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/weaviate/", "Install all needed libraries": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/yellowbrick/", "cosine: distance metric": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/jaguar/", "Pip install necessary package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/neo4jvector/", "initialize marqo": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/marqo/", "Uncomment this to install psychicapi if you don't already have it installed": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/psychic/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/web_scraping/"}, "Yellowbrick": {"Install all needed libraries": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/yellowbrick/"}, "LLMRails": {"Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/llm_rails/"}, "HanaDB": {"Pip install necessary package": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sap_hanavector/"}, "VectorSearchVectorStoreDatastore": {"TODO : Set values as per your requirements": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_vertex_ai_vector_search/"}, "VertexAI": {"TODO : Set values as per your requirements": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_vertex_ai_vector_search/", "google_vertex_ai_palm.md": "https://python.langchain.com/v0.2/docs/integrations/llms/google_vertex_ai_palm/"}, "NucliaDB": {"nucliadb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/nucliadb/"}, "Hippo": {"openai": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/hippo/"}, "RedisText": {"connection to redis standalone at localhost, db 0, no password": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/redis/"}, "RedisNum": {"connection to redis standalone at localhost, db 0, no password": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/redis/"}, "RedisTag": {"connection to redis standalone at localhost, db 0, no password": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/redis/"}, "RedisFilter": {"connection to redis standalone at localhost, db 0, no password": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/redis/"}, "VespaStore": {"docs[0].metadata[\"id\"] == \"id:testapp:testapp::32\"": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vespa/"}, "CosmosDBSimilarityType": {"Set up the OpenAI Environment Variables": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azure_cosmos_db/", "To make the caching 
really obvious, lets use a slower model.": "https://python.langchain.com/v0.2/docs/integrations/llms/llm_caching/"}, "CosmosDBVectorSearchType": {"Set up the OpenAI Environment Variables": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azure_cosmos_db/", "To make the caching really obvious, lets use a slower model.": "https://python.langchain.com/v0.2/docs/integrations/llms/llm_caching/"}, "NeuralDBVectorStore": {"From scratch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/thirdai_neuraldb/"}, "VikingDB": {"vikingdb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vikingdb/"}, "VikingDBConfig": {"vikingdb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vikingdb/"}, "InMemoryDocstore": {"default metric is angular": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/annoy/", "Define your embedding model": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/time_weighted_vectorstore/", "Backed by a Vector Store": "https://python.langchain.com/v0.2/docs/modules/memory/types/vectorstore_retriever_memory/"}, "CouchbaseVectorStore": {"Wait until the cluster is ready for use.": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/couchbase/"}, "VLite": {"Load the document and split it into chunks": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vlite/"}, "DuckDB": {"duckdb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/duckdb/"}, "StarRocksSettings": {"load text splitter and split docs into snippets of text": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/starrocks/"}, "PathwayVectorClient": {"take into account only sources modified later than unix timestamp": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pathway/"}, "DocArrayHnswSearch": {"Get an OpenAI token: https://platform.openai.com/account/api-keys": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/docarray_hnsw/"}, "TileDB": {"tiledb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tiledb/"}, "EcloudESVectorStore": {"ecloud_vector_search.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/ecloud_vector_search/"}, "SurrealDBStore": {"%pip install --upgrade --quiet surrealdb langchain langchain-community": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/surrealdb/"}, "ElasticVectorSearch": {"Metadata {#metadata}": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/elasticsearch/"}, "PGVecto_rs": {"Run tests with shell:": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pgvecto_rs/"}, "JSONLoader": {"Pip install necessary packages": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/timescalevector/", "JSON": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/json/"}, "CollectionConfig": {"Collection config is needed if we're creating a new Zep Collection": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/zep/"}, "BaiduVectorDB": {"baiduvectordb.md": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/baiduvectordb/"}, "openai": {"openai-old.md": "https://python.langchain.com/v0.2/docs/integrations/adapters/openai-old/", "openai.md": "https://python.langchain.com/v0.2/docs/integrations/adapters/openai/"}, "AsyncChromiumLoader": {"Load HTML": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/beautiful_soup/", 
"async_chromium.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/async_chromium/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/web_scraping/"}, "BeautifulSoupTransformer": {"Load HTML": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/beautiful_soup/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/web_scraping/"}, "OpenVINOReranker": {"Helper function for printing docs": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/openvino_rerank/"}, "create_metadata_tagger": {"Must be an OpenAI model that supports functions": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/openai_metadata_tagger/"}, "DoctranPropertyExtractor": {"doctran_extract_properties.md": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/doctran_extract_properties/"}, "DoctranQATransformer": {"doctran_interrogate_document.md": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/doctran_interrogate_document/"}, "CrossEncoderReranker": {"OR (depending on Python version)": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/cross_encoder_reranker/"}, "HuggingFaceCrossEncoder": {"OR (depending on Python version)": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/cross_encoder_reranker/"}, "DoctranTextTranslator": {"doctran_translate_document.md": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/doctran_translate_document/"}, "XorbitsLoader": {"Use lazy load for larger table, which won't read the full table into memory": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/xorbits/"}, "OutlookMessageLoader": {"email.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/email/"}, "TranscriptFormat": {"or a local file path: audio_file = \"./nbc.mp3\"": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/assemblyai/"}, "AirbyteSalesforceLoader": {"airbyte_salesforce.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_salesforce/"}, "AirbyteCDKLoader": {"airbyte_cdk.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_cdk/"}, "Docx2txtLoader": {"microsoft_word.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/microsoft_word/"}, "RSpaceLoader": {"rspace.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/rspace/"}, "SeleniumURLLoader": {"url.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/url/"}, "PlaywrightURLLoader": {"url.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/url/"}, "AirbyteJSONLoader": {"airbyte_json.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_json/"}, "GeoDataFrameLoader": {"Load Open City Data": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/geopandas/"}, "AirbyteTypeformLoader": {"airbyte_typeform.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_typeform/"}, "MHTMLLoader": {"Create a new loader object for the MHTML file": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/mhtml/"}, "NewsURLLoader": {"news.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/news/"}, "ImageCaptionLoader": {"image_captions.md": 
"https://python.langchain.com/v0.2/docs/integrations/document_loaders/image_captions/"}, "LLMSherpaFileLoader": {"Install package": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/llmsherpa/"}, "NucliaLoader": {"nuclia.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/nuclia/"}, "TomlLoader": {"toml.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/toml/"}, "PsychicLoader": {"Uncomment this to install psychicapi if you don't already have it installed": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/psychic/"}, "FireCrawlLoader": {"firecrawl.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/firecrawl/", "HTML": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/html/"}, "FakeListLLM": {"see https://python.langchain.com/v0.2/docs/use_cases/summarization for more details": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/larksuite/", "Define callback handlers by subclassing BaseModerationCallbackHandler": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/amazon_comprehend_chain/"}, "MergedDataLoader": {"merge_doc.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/merge_doc/"}, "RecursiveUrlLoader": {"Parameters {#parameters}": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/recursive_url/"}, "AirbyteHubspotLoader": {"airbyte_hubspot.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_hubspot/"}, "AirbyteGongLoader": {"airbyte_gong.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_gong/"}, "ReadTheDocsLoader": {"readthedocs_documentation.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/readthedocs_documentation/"}, "PolarsDataFrameLoader": {"Use lazy load for larger table, which won't read the full table into memory": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/polars_dataframe/"}, "DataFrameLoader": {"Use lazy load for larger table, which won't read the full table into memory": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/pandas_dataframe/"}, "SurrealDBLoader": {"%pip install --upgrade --quiet surrealdb langchain langchain-community": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/surrealdb/"}, "GoogleApiClient": {"Init the GoogleApiClient": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/youtube_transcript/"}, "ConcurrentLoader": {"concurrent.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/concurrent/"}, "RSSFeedLoader": {"rss.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/rss/"}, "PebbloSafeLoader": {"pebblo.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/pebblo/"}, "VsdxLoader": {"vsdx.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/vsdx/"}, "NotebookLoader": {"jupyter_notebook.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/jupyter_notebook/"}, "OracleAutonomousDatabaseLoader": {"oracleadb_loader.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/oracleadb_loader/"}, "LanguageParser": {"Code for: class MyClass:": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/source_code/", "Set env var OPENAI_API_KEY or load from a .env file": 
"https://python.langchain.com/v0.2/docs/use_cases/code_understanding/"}, "Language": {"Code for: class MyClass:": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/source_code/", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/v0.2/docs/use_cases/code_understanding/", "Full list of supported languages": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_transformers/code_splitter/"}, "SRTLoader": {"subtitle.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/subtitle/"}, "MastodonTootsLoader": {"Or set up access information to use a Mastodon app.": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/mastodon/"}, "AirbyteShopifyLoader": {"airbyte_shopify.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_shopify/"}, "GlueCatalogLoader": {"glue_catalog.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/glue_catalog/"}, "PySparkDataFrameLoader": {"pyspark_dataframe.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/pyspark_dataframe/"}, "AirbyteZendeskSupportLoader": {"airbyte_zendesk_support.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_zendesk_support/"}, "CoNLLULoader": {"conll-u.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/conll-u/"}, "MongodbLoader": {"add this import for running in jupyter notebook": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/mongodb/"}, "SitemapLoader": {"fixes a bug with asyncio and jupyter": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/sitemap/"}, "YuqueLoader": {"yuque.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/yuque/"}, "QuipLoader": {"quip.md": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/quip/"}, "MemgraphGraph": {"Creating and executing the seeding query": "https://python.langchain.com/v0.2/docs/integrations/graphs/memgraph/"}, "GraphSparqlQAChain": {"rdflib_sparql.md": "https://python.langchain.com/v0.2/docs/integrations/graphs/rdflib_sparql/"}, "RdfGraph": {"rdflib_sparql.md": "https://python.langchain.com/v0.2/docs/integrations/graphs/rdflib_sparql/"}, "NebulaGraphQAChain": {"connect ngql jupyter extension to nebulagraph": "https://python.langchain.com/v0.2/docs/integrations/graphs/nebula_graph/"}, "NebulaGraph": {"connect ngql jupyter extension to nebulagraph": "https://python.langchain.com/v0.2/docs/integrations/graphs/nebula_graph/"}, "GremlinQAChain": {"The underlying python-gremlin has a problem when running in notebook": "https://python.langchain.com/v0.2/docs/integrations/graphs/azure_cosmosdb_gremlin/"}, "GremlinGraph": {"The underlying python-gremlin has a problem when running in notebook": "https://python.langchain.com/v0.2/docs/integrations/graphs/azure_cosmosdb_gremlin/"}, "GraphDocument": {"The underlying python-gremlin has a problem when running in notebook": "https://python.langchain.com/v0.2/docs/integrations/graphs/azure_cosmosdb_gremlin/"}, "Node": {"The underlying python-gremlin has a problem when running in notebook": "https://python.langchain.com/v0.2/docs/integrations/graphs/azure_cosmosdb_gremlin/"}, "Relationship": {"The underlying python-gremlin has a problem when running in notebook": "https://python.langchain.com/v0.2/docs/integrations/graphs/azure_cosmosdb_gremlin/"}, "GraphIndexCreator": {"networkx.md": 
"https://python.langchain.com/v0.2/docs/integrations/graphs/networkx/"}, "GraphQAChain": {"networkx.md": "https://python.langchain.com/v0.2/docs/integrations/graphs/networkx/"}, "NetworkxEntityGraph": {"networkx.md": "https://python.langchain.com/v0.2/docs/integrations/graphs/networkx/"}, "HugeGraphQAChain": {"graph.refresh_schema()": "https://python.langchain.com/v0.2/docs/integrations/graphs/hugegraph/"}, "HugeGraph": {"graph.refresh_schema()": "https://python.langchain.com/v0.2/docs/integrations/graphs/hugegraph/"}, "AGEGraph": {"How many people played in Top Gun?": "https://python.langchain.com/v0.2/docs/integrations/graphs/apache_age/"}, "NeptuneSparqlQAChain": {"Optionally change the schema": "https://python.langchain.com/v0.2/docs/integrations/graphs/amazon_neptune_sparql/"}, "NeptuneRdfGraph": {"Optionally change the schema": "https://python.langchain.com/v0.2/docs/integrations/graphs/amazon_neptune_sparql/"}, "NeptuneGraph": {"amazon_neptune_open_cypher.md": "https://python.langchain.com/v0.2/docs/integrations/graphs/amazon_neptune_open_cypher/"}, "NeptuneAnalyticsGraph": {"amazon_neptune_open_cypher.md": "https://python.langchain.com/v0.2/docs/integrations/graphs/amazon_neptune_open_cypher/"}, "NeptuneOpenCypherQAChain": {"amazon_neptune_open_cypher.md": "https://python.langchain.com/v0.2/docs/integrations/graphs/amazon_neptune_open_cypher/"}, "KuzuQAChain": {"graph.refresh_schema()": "https://python.langchain.com/v0.2/docs/integrations/graphs/kuzu_db/"}, "KuzuGraph": {"graph.refresh_schema()": "https://python.langchain.com/v0.2/docs/integrations/graphs/kuzu_db/"}, "FalkorDBQAChain": {"falkordb.md": "https://python.langchain.com/v0.2/docs/integrations/graphs/falkordb/"}, "FalkorDBGraph": {"falkordb.md": "https://python.langchain.com/v0.2/docs/integrations/graphs/falkordb/"}, "ConversationBufferWindowMemory": {"Setup {#setup}": "https://python.langchain.com/v0.2/docs/integrations/llms/baseten/", "install the opaqueprompts and langchain packages": "https://python.langchain.com/v0.2/docs/integrations/llms/opaqueprompts/", "Conversation Buffer Window": "https://python.langchain.com/v0.2/docs/modules/memory/types/buffer_window/"}, "Solar": {"solar.md": "https://python.langchain.com/v0.2/docs/integrations/llms/solar/"}, "IpexLLM": {"Update Langchain": "https://python.langchain.com/v0.2/docs/integrations/llms/ipex_llm/"}, "SagemakerEndpoint": {"sagemaker.md": "https://python.langchain.com/v0.2/docs/integrations/llms/sagemaker/", "Define callback handlers by subclassing BaseModerationCallbackHandler": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/amazon_comprehend_chain/"}, "LLMContentHandler": {"sagemaker.md": "https://python.langchain.com/v0.2/docs/integrations/llms/sagemaker/", "Define callback handlers by subclassing BaseModerationCallbackHandler": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/amazon_comprehend_chain/"}, "OctoAIEndpoint": {"octoai.md": "https://python.langchain.com/v0.2/docs/integrations/llms/octoai/"}, "TextGen": {"textgen.md": "https://python.langchain.com/v0.2/docs/integrations/llms/textgen/"}, "MosaicML": {"sign up for an account: https://forms.mosaicml.com/demo?utm_source=langchain": "https://python.langchain.com/v0.2/docs/integrations/llms/mosaicml/"}, "VolcEngineMaasLLM": {"Install the package": "https://python.langchain.com/v0.2/docs/integrations/llms/volcengine_maas/"}, "KoboldApiLLM": {"koboldai.md": "https://python.langchain.com/v0.2/docs/integrations/llms/koboldai/"}, "Konko": {"konko.md": 
"https://python.langchain.com/v0.2/docs/integrations/llms/konko/"}, "AsyncCallbackHandler": {"Guardrails for Amazon Bedrock with trace": "https://python.langchain.com/v0.2/docs/integrations/llms/bedrock/", "To enable streaming, we pass in `streaming=True` to the ChatModel constructor": "https://python.langchain.com/v0.2/docs/modules/callbacks/async_callbacks/", "Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/streaming/"}, "set_verbose": {"install the opaqueprompts and langchain packages": "https://python.langchain.com/v0.2/docs/integrations/llms/opaqueprompts/", "Debugging": "https://python.langchain.com/v0.2/docs/guides/development/debugging/"}, "OpaquePrompts": {"install the opaqueprompts and langchain packages": "https://python.langchain.com/v0.2/docs/integrations/llms/opaqueprompts/"}, "TitanTakeoff": {"Note importing TitanTakeoffPro instead of TitanTakeoff will work as well both use same object under the hood": "https://python.langchain.com/v0.2/docs/integrations/llms/titan_takeoff/"}, "Friendli": {"friendli.md": "https://python.langchain.com/v0.2/docs/integrations/llms/friendli/"}, "Databricks": {"If running a Databricks notebook attached to an interactive cluster in \"single user\"": "https://python.langchain.com/v0.2/docs/integrations/llms/databricks/"}, "LMFormatEnforcer": {"lmformatenforcer_experimental.md": "https://python.langchain.com/v0.2/docs/integrations/llms/lmformatenforcer_experimental/"}, "VLLM": {"vllm.md": "https://python.langchain.com/v0.2/docs/integrations/llms/vllm/"}, "VLLMOpenAI": {"vllm.md": "https://python.langchain.com/v0.2/docs/integrations/llms/vllm/"}, "CustomOpenAIContentFormatter": {"azure_ml.md": "https://python.langchain.com/v0.2/docs/integrations/llms/azure_ml/"}, "ContentFormatterBase": {"azure_ml.md": "https://python.langchain.com/v0.2/docs/integrations/llms/azure_ml/"}, "DollyContentFormatter": {"azure_ml.md": "https://python.langchain.com/v0.2/docs/integrations/llms/azure_ml/"}, "load_llm": {"azure_ml.md": "https://python.langchain.com/v0.2/docs/integrations/llms/azure_ml/"}, "MapReduceChain": {"Map reduce example": "https://python.langchain.com/v0.2/docs/integrations/llms/manifest/"}, "ModelLaboratory": {"Map reduce example": "https://python.langchain.com/v0.2/docs/integrations/llms/manifest/"}, "RELLM": {"We'll choose a regex that matches to a structured json string that looks like:": "https://python.langchain.com/v0.2/docs/integrations/llms/rellm_experimental/"}, "Yuan2": {"default infer_api for a local deployed Yuan2.0 inference server": "https://python.langchain.com/v0.2/docs/integrations/llms/yuan2/"}, "InMemoryCache": {"To make the caching really obvious, lets use a slower model.": "https://python.langchain.com/v0.2/docs/modules/model_io/llms/llm_caching/", "": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/chat_model_caching/"}, "GPTCache": {"To make the caching really obvious, lets use a slower model.": "https://python.langchain.com/v0.2/docs/integrations/llms/llm_caching/"}, "SQLAlchemyCache": {"To make the caching really obvious, lets use a slower model.": "https://python.langchain.com/v0.2/docs/integrations/llms/llm_caching/"}, "AzureCosmosDBSemanticCache": {"To make the caching really obvious, lets use a slower model.": "https://python.langchain.com/v0.2/docs/integrations/llms/llm_caching/"}, "SparkLLM": {"Load the model": "https://python.langchain.com/v0.2/docs/integrations/llms/sparkllm/"}, "Moonshot": {"Generate your api key from: 
https://platform.moonshot.cn/console/api-keys": "https://python.langchain.com/v0.2/docs/integrations/llms/moonshot/"}, "OpenLM": {"Uncomment to install openlm and openai if you haven't already": "https://python.langchain.com/v0.2/docs/integrations/llms/openlm/"}, "CloudflareWorkersAI": {"Using streaming": "https://python.langchain.com/v0.2/docs/integrations/llms/cloudflare_workersai/"}, "ChatGLM3": {"Install required dependencies": "https://python.langchain.com/v0.2/docs/integrations/llms/chatglm/"}, "ChatGLM": {"Install required dependencies": "https://python.langchain.com/v0.2/docs/integrations/llms/chatglm/"}, "Llamafile": {"llamafile.md": "https://python.langchain.com/v0.2/docs/integrations/llms/llamafile/", "Download a llamafile from HuggingFace": "https://python.langchain.com/v0.2/docs/guides/development/local_llms/", "Make sure the model path is correct for your system!": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/local_retrieval_qa/"}, "LayerupSecurity": {"Layerup Security": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/layerup_security/"}, "JsonFormer": {"jsonformer_experimental.md": "https://python.langchain.com/v0.2/docs/integrations/llms/jsonformer_experimental/"}, "WeightOnlyQuantPipeline": {"weight_only_quantization.md": "https://python.langchain.com/v0.2/docs/integrations/llms/weight_only_quantization/"}, "Replicate": {"magics to auto-reload external modules in case you are making changes to langchain while working on this notebook": "https://python.langchain.com/v0.2/docs/integrations/llms/replicate/"}, "create_history_aware_retriever": {"Quickstart": "https://python.langchain.com/v0.2/docs/get_started/quickstart/", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/v0.2/docs/use_cases/code_understanding/", "import dotenv": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/chat_history/"}, "BaseOutputParser": {"Quickstart": "https://python.langchain.com/v0.2/docs/get_started/.ipynb_checkpoints/quickstart-checkpoint/", "The [bool] desribes a parameterization of a generic.": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/custom/"}, "ConditionalPromptSelector": {"Download a llamafile from HuggingFace": "https://python.langchain.com/v0.2/docs/guides/development/local_llms/"}, "DatetimeOutputParser": {"Note that we set max_retries = 0 to avoid retrying on RateLimits, etc": "https://python.langchain.com/v0.2/docs/guides/productionization/fallbacks/", "datetime.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/datetime/"}, "HuggingFaceInjectionIdentifier": {"Using https://huggingface.co/laiyer/deberta-v3-base-prompt-injection": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/hugging_face_prompt_injection/"}, "load_chain": {"Using https://huggingface.co/laiyer/deberta-v3-base-prompt-injection": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/hugging_face_prompt_injection/"}, "FallacyChain": {"Logical Fallacy chain": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/logical_fallacy_chain/"}, "ModerationPiiError": {"Define callback handlers by subclassing BaseModerationCallbackHandler": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/amazon_comprehend_chain/"}, "BaseModerationConfig": {"Define callback handlers by subclassing BaseModerationCallbackHandler": 
"https://python.langchain.com/v0.2/docs/guides/productionization/safety/amazon_comprehend_chain/"}, "ModerationPiiConfig": {"Define callback handlers by subclassing BaseModerationCallbackHandler": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/amazon_comprehend_chain/"}, "ModerationPromptSafetyConfig": {"Define callback handlers by subclassing BaseModerationCallbackHandler": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/amazon_comprehend_chain/"}, "ModerationToxicityConfig": {"Define callback handlers by subclassing BaseModerationCallbackHandler": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/amazon_comprehend_chain/"}, "BaseModerationCallbackHandler": {"Define callback handlers by subclassing BaseModerationCallbackHandler": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/amazon_comprehend_chain/"}, "ConstitutionalChain": {"Constitutional chain": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/constitutional_chain/"}, "ConstitutionalPrinciple": {"Constitutional chain": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/constitutional_chain/"}, "format_document": {"QA with private data protection {#qa-with-private-data-protection}": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/qa_privacy_protection/"}, "runnable": {"Multi-language data anonymization with Microsoft Presidio {#multi-language-data-anonymization-with-microsoft-presidio}": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/multi_language/"}, "case_insensitive_matching_strategy": {"Reversible data anonymization with Microsoft Presidio {#reversible-data-anonymization-with-microsoft-presidio}": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/reversible/"}, "fuzzy_matching_strategy": {"Reversible data anonymization with Microsoft Presidio {#reversible-data-anonymization-with-microsoft-presidio}": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/reversible/"}, "combined_exact_fuzzy_matching_strategy": {"Reversible data anonymization with Microsoft Presidio {#reversible-data-anonymization-with-microsoft-presidio}": "https://python.langchain.com/v0.2/docs/guides/productionization/safety/presidio_data_anonymization/reversible/"}, "load_evaluator": {"Initialize the language model": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/examples/comparisons/", "ANTHROPIC_API_KEY=": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/trajectory/trajectory_eval/", "pairwise_embedding_distance.md": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/comparison/pairwise_embedding_distance/", "The prompt was assigned to the evaluator": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/comparison/pairwise_string/", "This is equivalent to loading using the enum": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/string/criteria_eval_chain/", "Check for the presence of a YYYY-MM-DD string.": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/string/regex_match/", "Correct": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/string/scoring_eval_chain/", "Alternatively": 
"https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/string/exact_match/", "The results purely character-based, so it's less useful when negation is concerned": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/string/string_distance/", "You can load by enum or by raw python string": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/string/embedding_distance/"}, "load_dataset": {"Initialize the language model": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/examples/comparisons/"}, "AgentTrajectoryEvaluator": {"custom.md": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/trajectory/custom/"}, "EmbeddingDistance": {"pairwise_embedding_distance.md": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/comparison/pairwise_embedding_distance/", "You can load by enum or by raw python string": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/string/embedding_distance/"}, "PairwiseStringEvaluator": {"%env ANTHROPIC_API_KEY=YOUR_API_KEY": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/comparison/custom/"}, "Criteria": {"This is equivalent to loading using the enum": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/string/criteria_eval_chain/"}, "JsonValidityEvaluator": {"Equivalently": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/string/json/"}, "JsonEqualityEvaluator": {"Equivalently": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/string/json/"}, "JsonEditDistanceEvaluator": {"Equivalently": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/string/json/"}, "JsonSchemaEvaluator": {"Equivalently": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/string/json/"}, "RegexMatchStringEvaluator": {"Check for the presence of a YYYY-MM-DD string.": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/string/regex_match/"}, "StringEvaluator": {"The perplexity is much higher since LangChain was introduced after 'gpt-2' was released and because it is never used in the following context.": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/string/custom/"}, "ExactMatchStringEvaluator": {"Alternatively": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/string/exact_match/"}, "StringDistance": {"The results purely character-based, so it's less useful when negation is concerned": "https://python.langchain.com/v0.2/docs/guides/productionization/evaluation/string/string_distance/"}, "WebResearchRetriever": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/web_scraping/"}, "StuffDocumentsChain": {"Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/v0.2/docs/use_cases/summarization/", "Get embeddings.": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/long_context_reorder/"}, "MapReduceDocumentsChain": {"Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/v0.2/docs/use_cases/summarization/"}, "ReduceDocumentsChain": {"Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/v0.2/docs/use_cases/summarization/"}, "AnalyzeDocumentChain": {"Set env var OPENAI_API_KEY or load from a .env file": 
"https://python.langchain.com/v0.2/docs/use_cases/summarization/"}, "get_openapi_chain": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/apis/"}, "APIChain": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/apis/"}, "open_meteo_docs": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/apis/"}, "tmdb_docs": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/apis/"}, "podcast_docs": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/apis/"}, "LLMRequestsChain": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/apis/"}, "FewShotPromptTemplate": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/data_generation/", "Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/sql/prompting/", "Select the most similar example to the input.": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/few_shot_examples/", "Examples of a pretend task of creating antonyms.": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/example_selectors/similarity/", "index.md": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/example_selectors/index/", "Examples of a fictional translation task.": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/example_selectors/ngram_overlap/"}, "OPENAI_TEMPLATE": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/data_generation/"}, "create_openai_data_generator": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/data_generation/"}, "DatasetGenerator": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/data_generation/"}, "create_data_generation_chain": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/data_generation/"}, "create_extraction_chain_pydantic": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/data_generation/", "os.environ[\"OPENAI_API_KEY\"] = getpass.getpass()": "https://python.langchain.com/v0.2/docs/use_cases/sql/large_db/"}, "PydanticOutputParser": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/data_generation/", "Set up a parser": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/parse/", "Build a sample vectorDB": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/MultiQueryRetriever/", "Define your desired data structure.": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/pydantic/", "retry.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/retry/", "output_fixing.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/output_fixing/"}, "create_tool_calling_agent": {"os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/agents/", "Quickstart {#quickstart}": "https://python.langchain.com/v0.2/docs/modules/agents/quick_start/", "Construct the Tools 
agent": "https://python.langchain.com/v0.2/docs/modules/agents/agent_types/tool_calling/", "!pip install -qU langchain-openai": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/token_usage_tracking/"}, "Runnable": {"os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/tool_error_handling/", "If you'd like to use LangSmith, uncomment the below:": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/human_in_the_loop/"}, "RunnableConfig": {"os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/tool_error_handling/", "Run custom functions {#run-custom-functions}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/functions/"}, "ToolCall": {"os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/tool_error_handling/"}, "JsonOutputParser": {"If you'd like to use LangSmith, uncomment the below:": "https://python.langchain.com/v0.2/docs/use_cases/tool_use/prompting/", "Define your desired data structure.": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/json/", "Streaming With LangChain {#streaming-with-langchain}": "https://python.langchain.com/v0.2/docs/expression_language/streaming/"}, "ConfigurableField": {"This will only get documents for Ankush": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/per_user/", "initialize the bm25 retriever and faiss retriever": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/ensemble/", "batch_configurable_chain([\"ice cream\", \"spaghetti\", \"dumplings\"])": "https://python.langchain.com/v0.2/docs/expression_language/why/", "Configure chain internals at runtime {#configure-chain-internals-at-runtime}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/configure/"}, "RunnableBinding": {"This will only get documents for Ankush": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/per_user/"}, "RunnablePick": {"Make sure the model path is correct for your system!": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/local_retrieval_qa/"}, "ChatMessageHistory": {"import dotenv": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/chat_history/", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/chatbots/memory_management/", "agent_with_memory.md": "https://python.langchain.com/v0.2/docs/modules/memory/agent_with_memory/", "Conversation Summary": "https://python.langchain.com/v0.2/docs/modules/memory/types/summary/", "Chat Messages": "https://python.langchain.com/v0.2/docs/modules/memory/chat_messages/index/", "Quickstart {#quickstart}": "https://python.langchain.com/v0.2/docs/modules/agents/quick_start/", "Remembers": "https://python.langchain.com/v0.2/docs/expression_language/how_to/message_history/"}, "BaseChatMessageHistory": {"import dotenv": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/chat_history/", "Remembers": "https://python.langchain.com/v0.2/docs/expression_language/how_to/message_history/"}, "LogStreamCallbackHandler": {"import dotenv": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/streaming/"}, "JsonOutputKeyToolsParser": {"Uncomment if you want to log to LangSmith": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/citations/", "Using LangSmith is recommended but not required. 
Uncomment below lines to use.": "https://python.langchain.com/v0.2/docs/use_cases/sql/csv/", "openai_tools.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/openai_tools/"}, "ChatAnthropicMessages": {"Uncomment if you want to log to LangSmith": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/citations/"}, "XMLOutputParser": {"Uncomment if you want to log to LangSmith": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/citations/", "xml.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/xml/"}, "EmbeddingsFilter": {"Uncomment if you want to log to LangSmith": "https://python.langchain.com/v0.2/docs/use_cases/question_answering/citations/", "Helper function for printing docs": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/contextual_compression/"}, "PydanticToolsParser": {"%pip install -qU langchain langchain-community langchain-openai langchain-chroma": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/no_queries/", "%pip install -qU langchain langchain-openai": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/techniques/hyde/", "%pip install -qU langchain-core langchain-openai": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/techniques/step_back/", "Tool calling {#tool-calling}": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/function_calling/", "Function calling": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/.ipynb_checkpoints/function_calling-checkpoint/", "openai_tools.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/openai_tools/"}, "chain": {"%pip install -qU langchain langchain-community langchain-openai langchain-chroma": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/no_queries/", "Streaming With LangChain {#streaming-with-langchain}": "https://python.langchain.com/v0.2/docs/expression_language/streaming/", "decorator.md": "https://python.langchain.com/v0.2/docs/expression_language/how_to/decorator/"}, "Comparator": {"constructing-filters.md": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/constructing-filters/"}, "Comparison": {"constructing-filters.md": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/constructing-filters/"}, "Operation": {"constructing-filters.md": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/constructing-filters/"}, "Operator": {"constructing-filters.md": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/constructing-filters/"}, "StructuredQuery": {"constructing-filters.md": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/constructing-filters/"}, "ChromaTranslator": {"constructing-filters.md": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/constructing-filters/", "This example only specifies a filter": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/self_query/"}, "ElasticsearchTranslator": {"constructing-filters.md": "https://python.langchain.com/v0.2/docs/use_cases/query_analysis/how_to/constructing-filters/"}, "LLMGraphTransformer": {"Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/graph/constructing/"}, "CypherQueryCorrector": {"Uncomment the below to use LangSmith. 
Not required.": "https://python.langchain.com/v0.2/docs/use_cases/graph/mapping/"}, "Schema": {"Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/graph/mapping/"}, "AsyncCallbackManagerForToolRun": {"Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/graph/semantic/", "Import things that are needed generically": "https://python.langchain.com/v0.2/docs/modules/tools/custom_tools/"}, "CallbackManagerForToolRun": {"Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/graph/semantic/", "Import things that are needed generically": "https://python.langchain.com/v0.2/docs/modules/tools/custom_tools/"}, "BaseTool": {"Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/graph/semantic/", "Import things that are needed generically": "https://python.langchain.com/v0.2/docs/modules/tools/custom_tools/", "Function calling": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/.ipynb_checkpoints/function_calling-checkpoint/"}, "format_to_openai_function_messages": {"Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/graph/semantic/", "Load in document to retrieve over": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/agent_structured/", "prompt_size.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/prompt_size/"}, "OpenAIFunctionsAgentOutputParser": {"Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/graph/semantic/", "prompt_size.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/prompt_size/"}, "convert_to_openai_function": {"Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/graph/semantic/", "tools_as_openai_functions.md": "https://python.langchain.com/v0.2/docs/modules/tools/tools_as_openai_functions/"}, "SemanticSimilarityExampleSelector": {"Uncomment the below to use LangSmith. 
Not required.": "https://python.langchain.com/v0.2/docs/use_cases/sql/prompting/", "Select the most similar example to the input.": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/few_shot_examples/", "This is a prompt template used to format each individual example.": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/few_shot_examples_chat/", "Examples of a pretend task of creating antonyms.": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/example_selectors/similarity/"}, "RunnableBranch": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/v0.2/docs/use_cases/chatbots/quickstart/", "Dynamically route logic based on input {#dynamically-route-logic-based-on-input}": "https://python.langchain.com/v0.2/docs/expression_language/how_to/routing/"}, "BSHTMLLoader": {"Download the content": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/handle_long_text/", "HTML": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/html/"}, "create_structured_output_runnable": {"Download the content": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/handle_long_text/"}, "BS4HTMLParser": {"Configure the parsers that you want to use per mime-type!": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/handle_files/"}, "PDFMinerParser": {"Configure the parsers that you want to use per mime-type!": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/handle_files/"}, "MimeTypeBasedParser": {"Configure the parsers that you want to use per mime-type!": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/handle_files/"}, "TextParser": {"Configure the parsers that you want to use per mime-type!": "https://python.langchain.com/v0.2/docs/use_cases/extraction/how_to/handle_files/"}, "PythonAstREPLTool": {"Using LangSmith is recommended but not required. Uncomment below lines to use.": "https://python.langchain.com/v0.2/docs/use_cases/sql/csv/"}, "create_sql_query_chain": {"Uncomment the below to use LangSmith. Not required.": "https://python.langchain.com/v0.2/docs/use_cases/sql/prompting/", "os.environ[\"OPENAI_API_KEY\"] = getpass.getpass()": "https://python.langchain.com/v0.2/docs/use_cases/sql/large_db/"}, "QuerySQLDataBaseTool": {"Uncomment the below to use LangSmith. 
Not required.": "https://python.langchain.com/v0.2/docs/use_cases/sql/quickstart/"}, "SQLRecordManager": {"indexing.md": "https://python.langchain.com/v0.2/docs/modules/data_connection/indexing/"}, "index": {"indexing.md": "https://python.langchain.com/v0.2/docs/modules/data_connection/indexing/"}, "BaseLoader": {"indexing.md": "https://python.langchain.com/v0.2/docs/modules/data_connection/indexing/", "Custom Document Loader {#custom-document-loader}": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/custom/"}, "EnsembleRetriever": {"initialize the bm25 retriever and faiss retriever": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/ensemble/"}, "JsonKeyOutputFunctionsParser": {"The vectorstore to use to index the child chunks": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/multi_vector/", "openai_functions.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/openai_functions/", "prompt_llm_parser.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/prompt_llm_parser/"}, "LLMChainExtractor": {"Helper function for printing docs": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/contextual_compression/"}, "LLMChainFilter": {"Helper function for printing docs": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/contextual_compression/"}, "DocumentCompressorPipeline": {"Helper function for printing docs": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/contextual_compression/"}, "CallbackManagerForRetrieverRun": {"Custom Retriever {#custom-retriever}": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/custom_retriever/"}, "BaseRetriever": {"Custom Retriever {#custom-retriever}": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/custom_retriever/"}, "TimeWeightedVectorStoreRetriever": {"Define your embedding model": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/time_weighted_vectorstore/"}, "mock_now": {"Define your embedding model": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/time_weighted_vectorstore/"}, "ParentDocumentRetriever": {"This text splitter is used to create the child documents": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/parent_document_retriever/"}, "StructuredQueryOutputParser": {"This example only specifies a filter": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/self_query/"}, "get_query_constructor_prompt": {"This example only specifies a filter": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/self_query/"}, "Pinecone": {"Self-querying": "https://python.langchain.com/v0.2/docs/modules/data_connection/retrievers/.ipynb_checkpoints/self_query-checkpoint/"}, "RecursiveJsonSplitter": {"This is a large nested json object and will be loaded as a python dict": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_transformers/recursive_json_splitter/"}, "HTMLHeaderTextSplitter": {"for local file use html_splitter.split_text_from_file()": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_transformers/HTML_header_metadata/"}, "SemanticChunker": {"This is a long document we can split up.": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_transformers/semantic-chunker/"}, "SentenceTransformersTokenTextSplitter": 
{"This is a long document we can split up.": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_transformers/split_by_token/"}, "NLTKTextSplitter": {"This is a long document we can split up.": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_transformers/split_by_token/"}, "KonlpyTextSplitter": {"This is a long document we can split up.": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_transformers/split_by_token/"}, "MarkdownHeaderTextSplitter": {"MD splits": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_transformers/markdown_header_metadata/"}, "HTMLSectionSplitter": {"Split": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_transformers/HTML_section_aware_splitter/"}, "BaseBlobParser": {"Custom Document Loader {#custom-document-loader}": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/custom/"}, "FileSystemBlobLoader": {"Custom Document Loader {#custom-document-loader}": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/custom/"}, "MathpixPDFLoader": {"PDF": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/pdf/"}, "PyPDFium2Loader": {"PDF": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/pdf/"}, "PDFMinerLoader": {"PDF": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/pdf/"}, "PDFMinerPDFasHTMLLoader": {"PDF": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/pdf/"}, "PyMuPDFLoader": {"PDF": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/pdf/"}, "PyPDFDirectoryLoader": {"PDF": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/pdf/"}, "PDFPlumberLoader": {"PDF": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/pdf/"}, "PythonLoader": {"File Directory": "https://python.langchain.com/v0.2/docs/modules/data_connection/document_loaders/file_directory/"}, "ToolException": {"Import things that are needed generically": "https://python.langchain.com/v0.2/docs/modules/tools/custom_tools/"}, "MoveFileTool": {"tools_as_openai_functions.md": "https://python.langchain.com/v0.2/docs/modules/tools/tools_as_openai_functions/"}, "BaseMemory": {"!python -m spacy download en_core_web_lg": "https://python.langchain.com/v0.2/docs/modules/memory/custom_memory/"}, "CombinedMemory": {"Combined": "https://python.langchain.com/v0.2/docs/modules/memory/multiple_memory/"}, "ConversationSummaryMemory": {"Combined": "https://python.langchain.com/v0.2/docs/modules/memory/multiple_memory/", "Conversation Summary": "https://python.langchain.com/v0.2/docs/modules/memory/types/summary/"}, "ConversationKGMemory": {"kg.md": "https://python.langchain.com/v0.2/docs/modules/memory/types/kg/"}, "ConversationTokenBufferMemory": {"We can see here that the buffer is updated": "https://python.langchain.com/v0.2/docs/modules/memory/types/token_buffer/"}, "ConversationEntityMemory": {"Entity": "https://python.langchain.com/v0.2/docs/modules/memory/types/entity_summary_memory/"}, "ENTITY_MEMORY_CONVERSATION_TEMPLATE": {"Entity": "https://python.langchain.com/v0.2/docs/modules/memory/types/entity_summary_memory/"}, "VectorStoreRetrieverMemory": {"Backed by a Vector Store": "https://python.langchain.com/v0.2/docs/modules/memory/types/vectorstore_retriever_memory/"}, "BaseCallbackHandler": {"To enable streaming, we pass in 
`streaming=True` to the ChatModel constructor": "https://python.langchain.com/v0.2/docs/modules/callbacks/async_callbacks/", "First, define custom callback handler implementations": "https://python.langchain.com/v0.2/docs/modules/callbacks/multiple_callbacks/"}, "FileCallbackHandler": {"this chain will both print to stdout (because verbose=True) and write to 'output.log'": "https://python.langchain.com/v0.2/docs/modules/callbacks/filecallbackhandler/"}, "LLMResult": {"To enable streaming, we pass in `streaming=True` to the ChatModel constructor": "https://python.langchain.com/v0.2/docs/modules/callbacks/async_callbacks/", "Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/streaming/"}, "create_xml_agent": {"Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/agent_types/xml_agent/"}, "XMLAgentOutputParser": {"Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/agent_types/xml_agent/"}, "create_self_ask_with_search_agent": {"Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/agent_types/self_ask_with_search/"}, "TavilyAnswer": {"Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/agent_types/self_ask_with_search/"}, "OpenAIAssistantRunnable": {"openai_assistants.md": "https://python.langchain.com/v0.2/docs/modules/agents/agent_types/openai_assistants/"}, "AgentActionMessageLog": {"Load in document to retrieve over": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/agent_structured/"}, "LLMMathChain": {"need to use GPT-4 here as GPT-3.5 does not understand, however hard you insist, that": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/agent_iter/"}, "ChatGenerationChunk": {"Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/streaming/", "custom_chat_model.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/custom_chat_model/"}, "GenerationChunk": {"Get the prompt to use - you can modify this!": "https://python.langchain.com/v0.2/docs/modules/agents/how_to/streaming/", "custom_llm.md": "https://python.langchain.com/v0.2/docs/modules/model_io/llms/custom_llm/"}, "CommaSeparatedListOutputParser": {"Quickstart": "https://python.langchain.com/v0.2/docs/modules/model_io/.ipynb_checkpoints/quick_start-checkpoint/", "Model I/O": "https://python.langchain.com/v0.2/docs/modules/model_io/index/", "csv.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/csv/"}, "get_bedrock_anthropic_callback": {"!pip install -qU langchain-openai": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/token_usage_tracking/"}, "AIMessageChunk": {"custom_chat_model.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/custom_chat_model/", "The [bool] desribes a parameterization of a generic.": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/custom/"}, "FunctionMessageChunk": {"custom_chat_model.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/custom_chat_model/"}, "HumanMessageChunk": {"custom_chat_model.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/custom_chat_model/"}, "SystemMessageChunk": {"custom_chat_model.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/custom_chat_model/"}, "ToolMessageChunk": {"custom_chat_model.md": 
"https://python.langchain.com/v0.2/docs/modules/model_io/chat/custom_chat_model/"}, "AsyncCallbackManagerForLLMRun": {"custom_chat_model.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/custom_chat_model/"}, "CallbackManagerForLLMRun": {"custom_chat_model.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/custom_chat_model/", "custom_llm.md": "https://python.langchain.com/v0.2/docs/modules/model_io/llms/custom_llm/"}, "SimpleChatModel": {"custom_chat_model.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/custom_chat_model/"}, "ChatGeneration": {"custom_chat_model.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/custom_chat_model/", "The [bool] desribes a parameterization of a generic.": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/custom/"}, "ChatResult": {"custom_chat_model.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/custom_chat_model/"}, "run_in_executor": {"custom_chat_model.md": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/custom_chat_model/"}, "AIMessagePromptTemplate": {"Prompts": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/.ipynb_checkpoints/prompts-checkpoint/"}, "JsonOutputToolsParser": {"Function calling": "https://python.langchain.com/v0.2/docs/modules/model_io/chat/.ipynb_checkpoints/function_calling-checkpoint/", "openai_tools.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/openai_tools/"}, "RunnableGenerator": {"The [bool] desribes a parameterization of a generic.": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/custom/"}, "OutputParserException": {"The [bool] desribes a parameterization of a generic.": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/custom/"}, "BaseGenerationOutputParser": {"The [bool] desribes a parameterization of a generic.": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/custom/"}, "Generation": {"The [bool] desribes a parameterization of a generic.": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/custom/"}, "SimpleJsonOutputParser": {"Define your desired data structure.": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/quick_start/"}, "ResponseSchema": {"structured.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/structured/"}, "StructuredOutputParser": {"structured.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/structured/"}, "YamlOutputParser": {"Define your desired data structure.": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/yaml/"}, "OutputFixingParser": {"retry.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/retry/", "output_fixing.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/output_fixing/"}, "RetryOutputParser": {"retry.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/retry/"}, "EnumOutputParser": {"enum.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/enum/"}, "JsonOutputFunctionsParser": {"openai_functions.md": "https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/openai_functions/", "prompt_llm_parser.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/prompt_llm_parser/"}, "PandasDataFrameOutputParser": {"Solely for documentation purposes.": 
"https://python.langchain.com/v0.2/docs/modules/model_io/output_parsers/types/pandas_dataframe/"}, "PipelinePromptTemplate": {"composition.md": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/composition/"}, "ChatMessagePromptTemplate": {"Quick reference {#quick-reference}": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/quick_start/"}, "MaxMarginalRelevanceExampleSelector": {"Examples of a pretend task of creating antonyms.": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/example_selectors/mmr/"}, "LengthBasedExampleSelector": {"Examples of a pretend task of creating antonyms.": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/example_selectors/length_based/"}, "BaseExampleSelector": {"index.md": "https://python.langchain.com/v0.2/docs/modules/model_io/prompts/example_selectors/index/"}, "LLM": {"custom_llm.md": "https://python.langchain.com/v0.2/docs/modules/model_io/llms/custom_llm/"}, "ChatPromptValue": {"prompt_size.md": "https://python.langchain.com/v0.2/docs/expression_language/cookbook/prompt_size/"}, "cosine_similarity": {"Dynamically route logic based on input {#dynamically-route-logic-based-on-input}": "https://python.langchain.com/v0.2/docs/expression_language/how_to/routing/"}, "ConfigurableFieldSpec": {"Remembers": "https://python.langchain.com/v0.2/docs/expression_language/how_to/message_history/"}, "HubRunnable": {"Configure chain internals at runtime {#configure-chain-internals-at-runtime}": "https://python.langchain.com/v0.2/docs/expression_language/primitives/configure/"}} +{"ChatPromptTemplate": {"\ud83e\udd9c\ufe0f\ud83c\udfd3 LangServe": "https://python.langchain.com/v0.2/docs/langserve/", "Conceptual guide": "https://python.langchain.com/v0.2/docs/concepts/", "# Example": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/map_rerank_docs_chain/", "# Legacy": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/llm_router_chain/", "Load docs": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/conversation_retrieval_chain/", "# Basic example (short documents)": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/map_reduce_chain/", "How to add a semantic layer over graph database": "https://python.langchain.com/v0.2/docs/how_to/graph_semantic/", "How to handle long text when doing extraction": "https://python.langchain.com/v0.2/docs/how_to/extraction_long_text/", "How to add values to a chain's state": "https://python.langchain.com/v0.2/docs/how_to/assign/", "How to do per-user retrieval": "https://python.langchain.com/v0.2/docs/how_to/qa_per_user/", "How to track token usage in ChatModels": "https://python.langchain.com/v0.2/docs/how_to/chat_token_usage_tracking/", "How to create a custom LLM class": "https://python.langchain.com/v0.2/docs/how_to/custom_llm/", "How to inspect runnables": "https://python.langchain.com/v0.2/docs/how_to/inspect/", "How to handle cases where no queries are generated": "https://python.langchain.com/v0.2/docs/how_to/query_no_queries/", "How to use few shot examples in chat models": "https://python.langchain.com/v0.2/docs/how_to/few_shot_examples_chat/", "How to summarize text through iterative refinement": "https://python.langchain.com/v0.2/docs/how_to/summarize_refine/", "How to do tool/function calling": "https://python.langchain.com/v0.2/docs/how_to/function_calling/", "How to create tools": "https://python.langchain.com/v0.2/docs/how_to/custom_tools/", "How to use prompting alone (no tool calling) to do 
extraction": "https://python.langchain.com/v0.2/docs/how_to/extraction_parse/", "How to deal with large databases when doing SQL question-answering": "https://python.langchain.com/v0.2/docs/how_to/sql_large_db/", "How to use reference examples when doing extraction": "https://python.langchain.com/v0.2/docs/how_to/extraction_examples/", "How to handle multiple queries when doing query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_multiple_queries/", "How to add fallbacks to a runnable": "https://python.langchain.com/v0.2/docs/how_to/fallbacks/", "How to propagate callbacks constructor": "https://python.langchain.com/v0.2/docs/how_to/callbacks_constructor/", "How to map values to a graph database": "https://python.langchain.com/v0.2/docs/how_to/graph_mapping/", "How to save and load LangChain objects": "https://python.langchain.com/v0.2/docs/how_to/serialization/", "How to do question answering over CSVs": "https://python.langchain.com/v0.2/docs/how_to/sql_csv/", "How to stream results from your RAG application": "https://python.langchain.com/v0.2/docs/how_to/qa_streaming/", "How to get your RAG application to return sources": "https://python.langchain.com/v0.2/docs/how_to/qa_sources/", "How to summarize text through parallelization": "https://python.langchain.com/v0.2/docs/how_to/summarize_map_reduce/", "How to attach callbacks to a runnable": "https://python.langchain.com/v0.2/docs/how_to/callbacks_attach/", "How to handle tool errors": "https://python.langchain.com/v0.2/docs/how_to/tools_error/", "How to add tools to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_tools/", "How to add default invocation args to a Runnable": "https://python.langchain.com/v0.2/docs/how_to/binding/", "How to convert Runnables as Tools": "https://python.langchain.com/v0.2/docs/how_to/convert_runnable_to_tool/", "How to stream events from a tool": "https://python.langchain.com/v0.2/docs/how_to/tool_stream_events/", "How to create a dynamic (self-constructing) chain": "https://python.langchain.com/v0.2/docs/how_to/dynamic_chain/", "How to create custom callback handlers": "https://python.langchain.com/v0.2/docs/how_to/custom_callbacks/", "How to stream runnables": "https://python.langchain.com/v0.2/docs/how_to/streaming/", "How to invoke runnables in parallel": "https://python.langchain.com/v0.2/docs/how_to/parallel/", "How to pass through arguments from one step to the next": "https://python.langchain.com/v0.2/docs/how_to/passthrough/", "How to retrieve using multiple vectors per document": "https://python.langchain.com/v0.2/docs/how_to/multi_vector/", "How to add chat history": "https://python.langchain.com/v0.2/docs/how_to/qa_chat_history_how_to/", "How to add message history": "https://python.langchain.com/v0.2/docs/how_to/message_history/", "How to add retrieval to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_retrieval/", "How to handle multiple retrievers when doing query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_multiple_retrievers/", "How to get a RAG application to add citations": "https://python.langchain.com/v0.2/docs/how_to/qa_citations/", "How to run custom functions": "https://python.langchain.com/v0.2/docs/how_to/functions/", "How to add memory to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_memory/", "How deal with high cardinality categoricals when doing query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_high_cardinality/", "How to return structured data from a model": 
"https://python.langchain.com/v0.2/docs/how_to/structured_output/", "How to add ad-hoc tool calling capability to LLMs and Chat Models": "https://python.langchain.com/v0.2/docs/how_to/tools_prompting/", "LangChain Expression Language Cheatsheet": "https://python.langchain.com/v0.2/docs/how_to/lcel_cheatsheet/", "How to debug your LLM apps": "https://python.langchain.com/v0.2/docs/how_to/debugging/", "How to chain runnables": "https://python.langchain.com/v0.2/docs/how_to/sequence/", "Hybrid Search": "https://python.langchain.com/v0.2/docs/how_to/hybrid/", "How to migrate from legacy LangChain agents to LangGraph": "https://python.langchain.com/v0.2/docs/how_to/migrate_agent/", "How to do query validation as part of SQL question-answering": "https://python.langchain.com/v0.2/docs/how_to/sql_query_checking/", "How to summarize text in a single LLM call": "https://python.langchain.com/v0.2/docs/how_to/summarize_stuff/", "How to use multimodal prompts": "https://python.langchain.com/v0.2/docs/how_to/multimodal_prompts/", "How to use few-shot prompting with tool calling": "https://python.langchain.com/v0.2/docs/how_to/tools_few_shot/", "How to pass callbacks in at runtime": "https://python.langchain.com/v0.2/docs/how_to/callbacks_runtime/", "How to add examples to the prompt for query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_few_shot/", "Facebook Messenger": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/facebook/", "LangSmith LLM Runs": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/langsmith_llm_runs/", "iMessage": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/imessage/", "NVIDIA NIMs ": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/nvidia_ai_endpoints/", "AzureAISearchRetriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/azure_ai_search/", "You.com": "https://python.langchain.com/v0.2/docs/integrations/retrievers/you-retriever/", "Fleet AI Context": "https://python.langchain.com/v0.2/docs/integrations/retrievers/fleet_context/", "AskNews": "https://python.langchain.com/v0.2/docs/integrations/retrievers/asknews/", "WikipediaRetriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/wikipedia/", "TavilySearchAPIRetriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/tavily/", "Activeloop Deep Memory": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "RAGatouille": "https://python.langchain.com/v0.2/docs/integrations/retrievers/ragatouille/", "ArxivRetriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/arxiv/", "ElasticsearchRetriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/elasticsearch_retriever/", "Google Vertex AI Search": "https://python.langchain.com/v0.2/docs/integrations/retrievers/google_vertex_ai_search/", "Tavily Search": "https://python.langchain.com/v0.2/docs/integrations/tools/tavily_search/", "FinancialDatasets Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/financial_datasets/", "Databricks Unity Catalog (UC)": "https://python.langchain.com/v0.2/docs/integrations/tools/databricks/", "Riza Code Interpreter": "https://python.langchain.com/v0.2/docs/integrations/tools/riza/", "Redis": "https://python.langchain.com/v0.2/docs/integrations/memory/redis_chat_message_history/", "Google SQL for MySQL": "https://python.langchain.com/v0.2/docs/integrations/memory/google_sql_mysql/", "Google AlloyDB for PostgreSQL": 
"https://python.langchain.com/v0.2/docs/integrations/memory/google_alloydb/", "ZepCloudChatMessageHistory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_cloud_chat_message_history/", "AWS DynamoDB": "https://python.langchain.com/v0.2/docs/integrations/memory/aws_dynamodb/", "Couchbase": "https://python.langchain.com/v0.2/docs/integrations/memory/couchbase_chat_message_history/", "MongoDB": "https://python.langchain.com/v0.2/docs/integrations/memory/mongodb_chat_message_history/", "SQL (SQLAlchemy)": "https://python.langchain.com/v0.2/docs/integrations/memory/sql_chat_message_history/", "Streamlit": "https://python.langchain.com/v0.2/docs/integrations/memory/streamlit_chat_message_history/", "Google El Carro Oracle": "https://python.langchain.com/v0.2/docs/integrations/memory/google_el_carro/", "SQLite": "https://python.langchain.com/v0.2/docs/integrations/memory/sqlite/", "Google SQL for PostgreSQL": "https://python.langchain.com/v0.2/docs/integrations/memory/google_sql_pg/", "Google SQL for SQL Server": "https://python.langchain.com/v0.2/docs/integrations/memory/google_sql_mssql/", "TiDB": "https://python.langchain.com/v0.2/docs/integrations/memory/tidb_chat_message_history/", "Kinetica Language To SQL Chat Model": "https://python.langchain.com/v0.2/docs/integrations/chat/kinetica/", "ChatFireworks": "https://python.langchain.com/v0.2/docs/integrations/chat/fireworks/", "ChatYI": "https://python.langchain.com/v0.2/docs/integrations/chat/yi/", "ChatAnthropic": "https://python.langchain.com/v0.2/docs/integrations/chat/anthropic/", "ChatGroq": "https://python.langchain.com/v0.2/docs/integrations/chat/groq/", "ChatGoogleGenerativeAI": "https://python.langchain.com/v0.2/docs/integrations/chat/google_generative_ai/", "OllamaFunctions": "https://python.langchain.com/v0.2/docs/integrations/chat/ollama_functions/", "ChatOpenAI": "https://python.langchain.com/v0.2/docs/integrations/chat/openai/", "ChatVertexAI": "https://python.langchain.com/v0.2/docs/integrations/chat/google_vertex_ai_palm/", "ChatBedrock": "https://python.langchain.com/v0.2/docs/integrations/chat/bedrock/", "JinaChat": "https://python.langchain.com/v0.2/docs/integrations/chat/jinachat/", "ChatOllama": "https://python.langchain.com/v0.2/docs/integrations/chat/ollama/", "ChatOCIGenAI": "https://python.langchain.com/v0.2/docs/integrations/chat/oci_generative_ai/", "AzureChatOpenAI": "https://python.langchain.com/v0.2/docs/integrations/chat/azure_chat_openai/", "Llama.cpp": "https://python.langchain.com/v0.2/docs/integrations/chat/llamacpp/", "ChatMistralAI": "https://python.langchain.com/v0.2/docs/integrations/chat/mistralai/", "ChatAI21": "https://python.langchain.com/v0.2/docs/integrations/chat/ai21/", "ChatDatabricks": "https://python.langchain.com/v0.2/docs/integrations/chat/databricks/", "ChatTogether": "https://python.langchain.com/v0.2/docs/integrations/chat/together/", "Llama2Chat": "https://python.langchain.com/v0.2/docs/integrations/chat/llama2_chat/", "Cohere": "https://python.langchain.com/v0.2/docs/integrations/providers/cohere/", "Eden AI": "https://python.langchain.com/v0.2/docs/integrations/chat/edenai/", "ChatWatsonx": "https://python.langchain.com/v0.2/docs/integrations/chat/ibm_watsonx/", "vLLM Chat": "https://python.langchain.com/v0.2/docs/integrations/chat/vllm/", "Yuan2.0": "https://python.langchain.com/v0.2/docs/integrations/chat/yuan2/", "Maritalk": "https://python.langchain.com/v0.2/docs/integrations/chat/maritalk/", "ChatPerplexity": 
"https://python.langchain.com/v0.2/docs/integrations/chat/perplexity/", "ChatUpstage": "https://python.langchain.com/v0.2/docs/integrations/chat/upstage/", "ChatNVIDIA": "https://python.langchain.com/v0.2/docs/integrations/chat/nvidia_ai_endpoints/", "Context": "https://python.langchain.com/v0.2/docs/integrations/callbacks/context/", "Fiddler": "https://python.langchain.com/v0.2/docs/integrations/callbacks/fiddler/", "UpTrain": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "MLflow": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_tracking/", "Weaviate": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/weaviate/", "Yellowbrick": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/yellowbrick/", "Jaguar Vector Database": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/jaguar/", "ApertureDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/aperturedb/", "Apache Cassandra": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/cassandra/", "OpenAI metadata tagger": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/openai_metadata_tagger/", "Image captions": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/image_captions/", "Figma": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/figma/", "OllamaLLM": "https://python.langchain.com/v0.2/docs/integrations/llms/ollama/", "Build a Retrieval Augmented Generation (RAG) App": "https://python.langchain.com/v0.2/docs/tutorials/rag/", "Build a Local RAG Application": "https://python.langchain.com/v0.2/docs/tutorials/local_rag/", "Summarize Text": "https://python.langchain.com/v0.2/docs/tutorials/summarization/", "Build an Extraction Chain": "https://python.langchain.com/v0.2/docs/tutorials/extraction/", "Build a Chatbot": "https://python.langchain.com/v0.2/docs/tutorials/chatbot/", "Conversational RAG": "https://python.langchain.com/v0.2/docs/tutorials/qa_chat_history/", "Classify Text into Labels": "https://python.langchain.com/v0.2/docs/tutorials/classification/", "Build a Query Analysis System": "https://python.langchain.com/v0.2/docs/tutorials/query_analysis/", "Build a Simple LLM Application with LCEL": "https://python.langchain.com/v0.2/docs/tutorials/llm_chain/", "Build a PDF ingestion and Question/Answering system": "https://python.langchain.com/v0.2/docs/tutorials/pdf_qa/", "Vector stores and retrievers": "https://python.langchain.com/v0.2/docs/tutorials/retrievers/"}, "ChatAnthropic": {"\ud83e\udd9c\ufe0f\ud83c\udfd3 LangServe": "https://python.langchain.com/v0.2/docs/langserve/", "Conceptual guide": "https://python.langchain.com/v0.2/docs/concepts/", "How to use callbacks in async environments": "https://python.langchain.com/v0.2/docs/how_to/callbacks_async/", "How to route between sub-chains": "https://python.langchain.com/v0.2/docs/how_to/routing/", "How to track token usage in ChatModels": "https://python.langchain.com/v0.2/docs/how_to/chat_token_usage_tracking/", "How to merge consecutive messages of the same type": "https://python.langchain.com/v0.2/docs/how_to/merge_message_runs/", "How to parse XML output": "https://python.langchain.com/v0.2/docs/how_to/output_parser_xml/", "How to use prompting alone (no tool calling) to do extraction": "https://python.langchain.com/v0.2/docs/how_to/extraction_parse/", "How to handle rate limits": "https://python.langchain.com/v0.2/docs/how_to/chat_model_rate_limiting/", "How to add fallbacks to a runnable": 
"https://python.langchain.com/v0.2/docs/how_to/fallbacks/", "How to propagate callbacks constructor": "https://python.langchain.com/v0.2/docs/how_to/callbacks_constructor/", "How to stream chat model responses": "https://python.langchain.com/v0.2/docs/how_to/chat_streaming/", "How to attach callbacks to a runnable": "https://python.langchain.com/v0.2/docs/how_to/callbacks_attach/", "How to filter messages": "https://python.langchain.com/v0.2/docs/how_to/filter_messages/", "How to create a dynamic (self-constructing) chain": "https://python.langchain.com/v0.2/docs/how_to/dynamic_chain/", "How to create custom callback handlers": "https://python.langchain.com/v0.2/docs/how_to/custom_callbacks/", "How to configure runtime chain internals": "https://python.langchain.com/v0.2/docs/how_to/configure/", "How to create a custom Output Parser": "https://python.langchain.com/v0.2/docs/how_to/output_parser_custom/", "Response metadata": "https://python.langchain.com/v0.2/docs/how_to/response_metadata/", "How to pass callbacks in at runtime": "https://python.langchain.com/v0.2/docs/how_to/callbacks_runtime/", "Anthropic": "https://python.langchain.com/v0.2/docs/integrations/platforms/anthropic/", "PlayWright Browser Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/playwright/", "Riza Code Interpreter": "https://python.langchain.com/v0.2/docs/integrations/tools/riza/", "ChatAnthropic": "https://python.langchain.com/v0.2/docs/integrations/chat/anthropic/", "Log10": "https://python.langchain.com/v0.2/docs/integrations/providers/log10/", "Build an Agent": "https://python.langchain.com/v0.2/docs/tutorials/agents/"}, "ChatOpenAI": {"\ud83e\udd9c\ufe0f\ud83c\udfd3 LangServe": "https://python.langchain.com/v0.2/docs/langserve/", "Conceptual guide": "https://python.langchain.com/v0.2/docs/concepts/", "# Legacy": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/llm_router_chain/", "Load docs": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/retrieval_qa/", "# Example": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/map_rerank_docs_chain/", "How to add a semantic layer over graph database": "https://python.langchain.com/v0.2/docs/how_to/graph_semantic/", "How to disable parallel tool calling": "https://python.langchain.com/v0.2/docs/how_to/tool_calling_parallel/", "How to add values to a chain's state": "https://python.langchain.com/v0.2/docs/how_to/assign/", "How to track token usage in ChatModels": "https://python.langchain.com/v0.2/docs/how_to/chat_token_usage_tracking/", "How to inspect runnables": "https://python.langchain.com/v0.2/docs/how_to/inspect/", "How to bind model-specific tools": "https://python.langchain.com/v0.2/docs/how_to/tools_model_specific/", "How to handle cases where no queries are generated": "https://python.langchain.com/v0.2/docs/how_to/query_no_queries/", "How to trim messages": "https://python.langchain.com/v0.2/docs/how_to/trim_messages/", "How to use few shot examples in chat models": "https://python.langchain.com/v0.2/docs/how_to/few_shot_examples_chat/", "How to use LangChain with different Pydantic versions": "https://python.langchain.com/v0.2/docs/how_to/pydantic_compatibility/", "How to use reference examples when doing extraction": "https://python.langchain.com/v0.2/docs/how_to/extraction_examples/", "How to handle multiple queries when doing query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_multiple_queries/", "How to add fallbacks to a runnable": 
"https://python.langchain.com/v0.2/docs/how_to/fallbacks/", "How to map values to a graph database": "https://python.langchain.com/v0.2/docs/how_to/graph_mapping/", "How to pass multimodal data directly to models": "https://python.langchain.com/v0.2/docs/how_to/multimodal_inputs/", "How to save and load LangChain objects": "https://python.langchain.com/v0.2/docs/how_to/serialization/", "How to use the output-fixing parser": "https://python.langchain.com/v0.2/docs/how_to/output_parser_fixing/", "How to convert tools to OpenAI Functions": "https://python.langchain.com/v0.2/docs/how_to/tools_as_openai_functions/", "How to add tools to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_tools/", "How to add default invocation args to a Runnable": "https://python.langchain.com/v0.2/docs/how_to/binding/", "How to invoke runnables in parallel": "https://python.langchain.com/v0.2/docs/how_to/parallel/", "How to pass through arguments from one step to the next": "https://python.langchain.com/v0.2/docs/how_to/passthrough/", "How to add chat history": "https://python.langchain.com/v0.2/docs/how_to/qa_chat_history_how_to/", "How to add retrieval to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_retrieval/", "How to configure runtime chain internals": "https://python.langchain.com/v0.2/docs/how_to/configure/", "How to do retrieval with contextual compression": "https://python.langchain.com/v0.2/docs/how_to/contextual_compression/", "How to handle multiple retrievers when doing query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_multiple_retrievers/", "How to construct knowledge graphs": "https://python.langchain.com/v0.2/docs/how_to/graph_constructing/", "How to retry when a parsing error occurs": "https://python.langchain.com/v0.2/docs/how_to/output_parser_retry/", "How to use the MultiQueryRetriever": "https://python.langchain.com/v0.2/docs/how_to/MultiQueryRetriever/", "How to run custom functions": "https://python.langchain.com/v0.2/docs/how_to/functions/", "How to best prompt for Graph-RAG": "https://python.langchain.com/v0.2/docs/how_to/graph_prompting/", "How to add memory to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_memory/", "How deal with high cardinality categoricals when doing query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_high_cardinality/", "How to get log probabilities": "https://python.langchain.com/v0.2/docs/how_to/logprobs/", "How to parse YAML output": "https://python.langchain.com/v0.2/docs/how_to/output_parser_yaml/", "Response metadata": "https://python.langchain.com/v0.2/docs/how_to/response_metadata/", "How to parse JSON output": "https://python.langchain.com/v0.2/docs/how_to/output_parser_json/", "How to do \"self-querying\" retrieval": "https://python.langchain.com/v0.2/docs/how_to/self_query/", "Hybrid Search": "https://python.langchain.com/v0.2/docs/how_to/hybrid/", "How to migrate from legacy LangChain agents to LangGraph": "https://python.langchain.com/v0.2/docs/how_to/migrate_agent/", "How to stream tool calls": "https://python.langchain.com/v0.2/docs/how_to/tool_streaming/", "How to use multimodal prompts": "https://python.langchain.com/v0.2/docs/how_to/multimodal_prompts/", "How to add scores to retriever results": "https://python.langchain.com/v0.2/docs/how_to/add_scores_retriever/", "How to use few-shot prompting with tool calling": "https://python.langchain.com/v0.2/docs/how_to/tools_few_shot/", "How to add examples to the prompt for query analysis": 
"https://python.langchain.com/v0.2/docs/how_to/query_few_shot/", "Model caches": "https://python.langchain.com/v0.2/docs/integrations/llm_caching/", "WeChat": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/wechat/", "Facebook Messenger": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/facebook/", "LangSmith LLM Runs": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/langsmith_llm_runs/", "Slack": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/slack/", "WhatsApp": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/whatsapp/", "LangSmith Chat Datasets": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/langsmith_dataset/", "iMessage": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/imessage/", "Telegram": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/telegram/", "Discord": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/discord/", "AzureAISearchRetriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/azure_ai_search/", "RePhraseQuery": "https://python.langchain.com/v0.2/docs/integrations/retrievers/re_phrase/", "You.com": "https://python.langchain.com/v0.2/docs/integrations/retrievers/you-retriever/", "Fleet AI Context": "https://python.langchain.com/v0.2/docs/integrations/retrievers/fleet_context/", "AskNews": "https://python.langchain.com/v0.2/docs/integrations/tools/asknews/", "LLMLingua Document Compressor": "https://python.langchain.com/v0.2/docs/integrations/retrievers/llmlingua/", "Outline": "https://python.langchain.com/v0.2/docs/integrations/retrievers/outline/", "SEC filing": "https://python.langchain.com/v0.2/docs/integrations/retrievers/sec_filings/", "TavilySearchAPIRetriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/tavily/", "Activeloop Deep Memory": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "RAGatouille": "https://python.langchain.com/v0.2/docs/integrations/retrievers/ragatouille/", "ElasticsearchRetriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/elasticsearch_retriever/", "Rememberizer": "https://python.langchain.com/v0.2/docs/integrations/retrievers/rememberizer/", "Milvus Hybrid Search Retriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/milvus_hybrid_search/", "Kay.ai": "https://python.langchain.com/v0.2/docs/integrations/retrievers/kay/", "FlashRank reranker": "https://python.langchain.com/v0.2/docs/integrations/retrievers/flashrank-reranker/", "Vectara self-querying ": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/vectara_self_query/", "SAP HANA Cloud Vector Engine": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sap_hanavector/", "Tencent Cloud VectorDB": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/tencentvectordb/", "MyScale": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/myscale_self_query/", "OpenAI": "https://python.langchain.com/v0.2/docs/integrations/platforms/openai/", "ChatGPT Plugins": "https://python.langchain.com/v0.2/docs/integrations/tools/chatgpt_plugins/", "Connery Toolkit and Tools": "https://python.langchain.com/v0.2/docs/integrations/tools/connery/", "Infobip": "https://python.langchain.com/v0.2/docs/integrations/tools/infobip/", "PowerBI Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/powerbi/", "E2B Data Analysis": 
"https://python.langchain.com/v0.2/docs/integrations/tools/e2b_data_analysis/", "Human as a tool": "https://python.langchain.com/v0.2/docs/integrations/tools/human_tools/", "Azure Container Apps dynamic sessions": "https://python.langchain.com/v0.2/docs/integrations/tools/azure_dynamic_sessions/", "FinancialDatasets Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/financial_datasets/", "Slack Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/slack/", "Cassandra Database Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/cassandra_database/", "Yahoo Finance News": "https://python.langchain.com/v0.2/docs/integrations/tools/yahoo_finance_news/", "Polygon IO Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/polygon_toolkit/", "Semantic Scholar API Tool": "https://python.langchain.com/v0.2/docs/integrations/tools/semanticscholar/", "Spark SQL Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/spark_sql/", "Requests Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/requests/", "AINetwork Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/ainetwork/", "Passio NutritionAI": "https://python.langchain.com/v0.2/docs/integrations/tools/passio_nutrition_ai/", "Cogniswitch Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/cogniswitch/", "Bearly Code Interpreter": "https://python.langchain.com/v0.2/docs/integrations/tools/bearly/", "Pandas Dataframe": "https://python.langchain.com/v0.2/docs/integrations/tools/pandas/", "ArXiv": "https://python.langchain.com/v0.2/docs/integrations/tools/arxiv/", "Robocorp Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/robocorp/", "Connery Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/connery_toolkit/", "MultiOn Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/multion/", "Exa Search": "https://python.langchain.com/v0.2/docs/integrations/tools/exa_search/", "Amadeus Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/amadeus/", "Reddit Search ": "https://python.langchain.com/v0.2/docs/integrations/tools/reddit_search/", "You.com Search": "https://python.langchain.com/v0.2/docs/integrations/tools/you/", "OpenAPI Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/openapi/", "Shell (bash)": "https://python.langchain.com/v0.2/docs/integrations/tools/bash/", "Polygon IO Toolkit and Tools": "https://python.langchain.com/v0.2/docs/integrations/tools/polygon/", "Redis": "https://python.langchain.com/v0.2/docs/integrations/memory/redis_chat_message_history/", "ZepCloudChatMessageHistory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_cloud_chat_message_history/", "AWS DynamoDB": "https://python.langchain.com/v0.2/docs/integrations/memory/aws_dynamodb/", "Couchbase": "https://python.langchain.com/v0.2/docs/integrations/memory/couchbase_chat_message_history/", "MongoDB": "https://python.langchain.com/v0.2/docs/integrations/memory/mongodb_chat_message_history/", "Xata": "https://python.langchain.com/v0.2/docs/integrations/memory/xata_chat_message_history/", "Remembrall": "https://python.langchain.com/v0.2/docs/integrations/memory/remembrall/", "SQL (SQLAlchemy)": "https://python.langchain.com/v0.2/docs/integrations/memory/sql_chat_message_history/", "Streamlit": "https://python.langchain.com/v0.2/docs/integrations/memory/streamlit_chat_message_history/", "SQLite": 
"https://python.langchain.com/v0.2/docs/integrations/memory/sqlite/", "TiDB": "https://python.langchain.com/v0.2/docs/integrations/memory/tidb_chat_message_history/", "ChatOpenAI": "https://python.langchain.com/v0.2/docs/integrations/chat/openai/", "vLLM Chat": "https://python.langchain.com/v0.2/docs/integrations/chat/vllm/", "LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/callbacks/llmonitor/", "Context": "https://python.langchain.com/v0.2/docs/integrations/callbacks/context/", "Label Studio": "https://python.langchain.com/v0.2/docs/integrations/callbacks/labelstudio/", "PromptLayer": "https://python.langchain.com/v0.2/docs/integrations/callbacks/promptlayer/", "UpTrain": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "Trubrics": "https://python.langchain.com/v0.2/docs/integrations/callbacks/trubrics/", "Infino": "https://python.langchain.com/v0.2/docs/integrations/callbacks/infino/", "Upstash Ratelimit Callback": "https://python.langchain.com/v0.2/docs/integrations/callbacks/upstash_ratelimit/", "CnosDB": "https://python.langchain.com/v0.2/docs/integrations/providers/cnosdb/", "Log10": "https://python.langchain.com/v0.2/docs/integrations/providers/log10/", "MLflow": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_tracking/", "Flyte": "https://python.langchain.com/v0.2/docs/integrations/providers/flyte/", "Arthur": "https://python.langchain.com/v0.2/docs/integrations/providers/arthur_tracking/", "Dataherald": "https://python.langchain.com/v0.2/docs/integrations/providers/dataherald/", "Log, Trace, and Monitor": "https://python.langchain.com/v0.2/docs/integrations/providers/portkey/logging_tracing_portkey/", "Portkey": "https://python.langchain.com/v0.2/docs/integrations/providers/portkey/index/", "Weaviate": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/weaviate/", "Yellowbrick": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/yellowbrick/", "Jaguar Vector Database": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/jaguar/", "Hippo": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/hippo/", "Vectara": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vectara/", "Momento Vector Index (MVI)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/momento_vector_index/", "Neo4j Vector Index": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/neo4jvector/", "Apache Cassandra": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/cassandra/", "KDB.AI": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/kdbai/", "Timescale Vector (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/timescalevector/", "RankLLM Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/rankllm-reranker/", "OpenAI metadata tagger": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/openai_metadata_tagger/", "Jina Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/jina_rerank/", "YouTube audio": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/youtube_audio/", "Image captions": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/image_captions/", "Figma": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/figma/", "Browserbase": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/browserbase/", "Memgraph": 
"https://python.langchain.com/v0.2/docs/integrations/graphs/memgraph/", "RDFLib": "https://python.langchain.com/v0.2/docs/integrations/graphs/rdflib_sparql/", "NebulaGraph": "https://python.langchain.com/v0.2/docs/integrations/graphs/nebula_graph/", "HugeGraph": "https://python.langchain.com/v0.2/docs/integrations/graphs/hugegraph/", "Diffbot": "https://python.langchain.com/v0.2/docs/integrations/graphs/diffbot/", "Ontotext GraphDB": "https://python.langchain.com/v0.2/docs/integrations/graphs/ontotext/", "Apache AGE": "https://python.langchain.com/v0.2/docs/integrations/graphs/apache_age/", "Neo4j": "https://python.langchain.com/v0.2/docs/integrations/graphs/neo4j_cypher/", "ArangoDB": "https://python.langchain.com/v0.2/docs/integrations/graphs/arangodb/", "Amazon Neptune with Cypher": "https://python.langchain.com/v0.2/docs/integrations/graphs/amazon_neptune_open_cypher/", "Kuzu": "https://python.langchain.com/v0.2/docs/integrations/graphs/kuzu_db/", "FalkorDB": "https://python.langchain.com/v0.2/docs/integrations/graphs/falkordb/", "Conversational RAG": "https://python.langchain.com/v0.2/docs/tutorials/qa_chat_history/", "Classify Text into Labels": "https://python.langchain.com/v0.2/docs/tutorials/classification/", "Build a Query Analysis System": "https://python.langchain.com/v0.2/docs/tutorials/query_analysis/", "Build a Simple LLM Application with LCEL": "https://python.langchain.com/v0.2/docs/tutorials/llm_chain/", "Generate Synthetic Data": "https://python.langchain.com/v0.2/docs/tutorials/data_generation/", "Build a Question Answering application over a Graph Database": "https://python.langchain.com/v0.2/docs/tutorials/graph/"}, "SystemMessage": {"\ud83e\udd9c\ufe0f\ud83c\udfd3 LangServe": "https://python.langchain.com/v0.2/docs/langserve/", "How to merge consecutive messages of the same type": "https://python.langchain.com/v0.2/docs/how_to/merge_message_runs/", "How to trim messages": "https://python.langchain.com/v0.2/docs/how_to/trim_messages/", "How to use reference examples when doing extraction": "https://python.langchain.com/v0.2/docs/how_to/extraction_examples/", "How to create a custom chat model class": "https://python.langchain.com/v0.2/docs/how_to/custom_chat_model/", "How to filter messages": "https://python.langchain.com/v0.2/docs/how_to/filter_messages/", "How to compose prompts together": "https://python.langchain.com/v0.2/docs/how_to/prompts_composition/", "How to migrate from legacy LangChain agents to LangGraph": "https://python.langchain.com/v0.2/docs/how_to/migrate_agent/", "How to add examples to the prompt for query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_few_shot/", "Activeloop Deep Memory": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "Robocorp Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/robocorp/", "Exa Search": "https://python.langchain.com/v0.2/docs/integrations/tools/exa_search/", "Snowflake Cortex": "https://python.langchain.com/v0.2/docs/integrations/chat/snowflake/", "# Related": "https://python.langchain.com/v0.2/docs/integrations/chat/solar/", "ChatHuggingFace": "https://python.langchain.com/v0.2/docs/integrations/chat/huggingface/", "ChatOctoAI": "https://python.langchain.com/v0.2/docs/integrations/chat/octoai/", "ChatYI": "https://python.langchain.com/v0.2/docs/integrations/chat/yi/", "LlamaEdge": "https://python.langchain.com/v0.2/docs/integrations/chat/llama_edge/", "ChatKonko": "https://python.langchain.com/v0.2/docs/integrations/chat/konko/", "GigaChat": 
"https://python.langchain.com/v0.2/docs/integrations/chat/gigachat/", "JinaChat": "https://python.langchain.com/v0.2/docs/integrations/chat/jinachat/", "ChatOCIGenAI": "https://python.langchain.com/v0.2/docs/integrations/chat/oci_generative_ai/", "ChatEverlyAI": "https://python.langchain.com/v0.2/docs/integrations/chat/everlyai/", "ChatFriendli": "https://python.langchain.com/v0.2/docs/integrations/chat/friendli/", "ZHIPU AI": "https://python.langchain.com/v0.2/docs/integrations/chat/zhipuai/", "Llama2Chat": "https://python.langchain.com/v0.2/docs/integrations/chat/llama2_chat/", "ChatWatsonx": "https://python.langchain.com/v0.2/docs/integrations/chat/ibm_watsonx/", "vLLM Chat": "https://python.langchain.com/v0.2/docs/integrations/chat/vllm/", "Yuan2.0": "https://python.langchain.com/v0.2/docs/integrations/chat/yuan2/", "ChatTongyi": "https://python.langchain.com/v0.2/docs/integrations/chat/tongyi/", "MoonshotChat": "https://python.langchain.com/v0.2/docs/integrations/chat/moonshot/", "ChatPremAI": "https://python.langchain.com/v0.2/docs/integrations/chat/premai/", "ChatAnyscale": "https://python.langchain.com/v0.2/docs/integrations/chat/anyscale/", "ChatYandexGPT": "https://python.langchain.com/v0.2/docs/integrations/chat/yandex/", "LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/callbacks/llmonitor/", "Context": "https://python.langchain.com/v0.2/docs/integrations/callbacks/context/", "Label Studio": "https://python.langchain.com/v0.2/docs/integrations/callbacks/labelstudio/", "Trubrics": "https://python.langchain.com/v0.2/docs/integrations/callbacks/trubrics/", "MLflow Deployments for LLMs": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow/", "MLflow AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_ai_gateway/", "PremAI": "https://python.langchain.com/v0.2/docs/integrations/providers/premai/", "Javelin AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/javelin_ai_gateway/", "Google Cloud Vertex AI": "https://python.langchain.com/v0.2/docs/integrations/llms/google_vertex_ai_palm/", "Javelin AI Gateway Tutorial": "https://python.langchain.com/v0.2/docs/integrations/llms/javelin/", "Build a Chatbot": "https://python.langchain.com/v0.2/docs/tutorials/chatbot/", "Build a Question/Answering system over SQL data": "https://python.langchain.com/v0.2/docs/tutorials/sql_qa/", "Build a Simple LLM Application with LCEL": "https://python.langchain.com/v0.2/docs/tutorials/llm_chain/"}, "HumanMessage": {"\ud83e\udd9c\ufe0f\ud83c\udfd3 LangServe": "https://python.langchain.com/v0.2/docs/langserve/", "Conceptual guide": "https://python.langchain.com/v0.2/docs/concepts/", "Build an Agent with AgentExecutor (Legacy)": "https://python.langchain.com/v0.2/docs/how_to/agent_executor/", "How to add a semantic layer over graph database": "https://python.langchain.com/v0.2/docs/how_to/graph_semantic/", "How to use callbacks in async environments": "https://python.langchain.com/v0.2/docs/how_to/callbacks_async/", "How to merge consecutive messages of the same type": "https://python.langchain.com/v0.2/docs/how_to/merge_message_runs/", "How to trim messages": "https://python.langchain.com/v0.2/docs/how_to/trim_messages/", "How to do tool/function calling": "https://python.langchain.com/v0.2/docs/how_to/function_calling/", "How to use reference examples when doing extraction": "https://python.langchain.com/v0.2/docs/how_to/extraction_examples/", "How to pass multimodal data directly to models": 
"https://python.langchain.com/v0.2/docs/how_to/multimodal_inputs/", "How to create a custom chat model class": "https://python.langchain.com/v0.2/docs/how_to/custom_chat_model/", "How to convert tools to OpenAI Functions": "https://python.langchain.com/v0.2/docs/how_to/tools_as_openai_functions/", "How to filter messages": "https://python.langchain.com/v0.2/docs/how_to/filter_messages/", "How to handle tool errors": "https://python.langchain.com/v0.2/docs/how_to/tools_error/", "How to add tools to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_tools/", "How to add chat history": "https://python.langchain.com/v0.2/docs/how_to/qa_chat_history_how_to/", "How to add message history": "https://python.langchain.com/v0.2/docs/how_to/message_history/", "How to add retrieval to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_retrieval/", "How to pass tool outputs to chat models": "https://python.langchain.com/v0.2/docs/how_to/tool_results_pass_to_model/", "How to return structured data from a model": "https://python.langchain.com/v0.2/docs/how_to/structured_output/", "How to compose prompts together": "https://python.langchain.com/v0.2/docs/how_to/prompts_composition/", "How to use few-shot prompting with tool calling": "https://python.langchain.com/v0.2/docs/how_to/tools_few_shot/", "How to add examples to the prompt for query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_few_shot/", "WeChat": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/wechat/", "Discord": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/discord/", "Zep Open Source": "https://python.langchain.com/v0.2/docs/integrations/retrievers/zep_memorystore/", "Zep Cloud": "https://python.langchain.com/v0.2/docs/integrations/retrievers/zep_cloud_memorystore/", "Activeloop Deep Memory": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "Google Imagen": "https://python.langchain.com/v0.2/docs/integrations/tools/google_imagen/", "Zep Open Source Memory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory/", "ZepCloudChatMessageHistory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_cloud_chat_message_history/", "Zep Cloud Memory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory_cloud/", "Snowflake Cortex": "https://python.langchain.com/v0.2/docs/integrations/chat/snowflake/", "# Related": "https://python.langchain.com/v0.2/docs/integrations/chat/solar/", "ChatHuggingFace": "https://python.langchain.com/v0.2/docs/integrations/chat/huggingface/", "AzureMLChatOnlineEndpoint": "https://python.langchain.com/v0.2/docs/integrations/chat/azureml_chat_endpoint/", "Alibaba Cloud PAI EAS": "https://python.langchain.com/v0.2/docs/integrations/chat/alibaba_cloud_pai_eas/", "Chat with Coze Bot": "https://python.langchain.com/v0.2/docs/integrations/chat/coze/", "ChatOctoAI": "https://python.langchain.com/v0.2/docs/integrations/chat/octoai/", "ChatYI": "https://python.langchain.com/v0.2/docs/integrations/chat/yi/", "DeepInfra": "https://python.langchain.com/v0.2/docs/integrations/chat/deepinfra/", "ChatLiteLLM": "https://python.langchain.com/v0.2/docs/integrations/chat/litellm/", "LlamaEdge": "https://python.langchain.com/v0.2/docs/integrations/chat/llama_edge/", "VolcEngineMaasChat": "https://python.langchain.com/v0.2/docs/integrations/chat/volcengine_maas/", "ChatKonko": 
"https://python.langchain.com/v0.2/docs/integrations/chat/konko/", "MLX": "https://python.langchain.com/v0.2/docs/integrations/chat/mlx/", "GigaChat": "https://python.langchain.com/v0.2/docs/integrations/chat/gigachat/", "JinaChat": "https://python.langchain.com/v0.2/docs/integrations/chat/jinachat/", "ChatOllama": "https://python.langchain.com/v0.2/docs/integrations/chat/ollama/", "ChatOCIGenAI": "https://python.langchain.com/v0.2/docs/integrations/chat/oci_generative_ai/", "ChatEverlyAI": "https://python.langchain.com/v0.2/docs/integrations/chat/everlyai/", "GPTRouter": "https://python.langchain.com/v0.2/docs/integrations/chat/gpt_router/", "ChatLiteLLMRouter": "https://python.langchain.com/v0.2/docs/integrations/chat/litellm_router/", "ChatFriendli": "https://python.langchain.com/v0.2/docs/integrations/chat/friendli/", "ZHIPU AI": "https://python.langchain.com/v0.2/docs/integrations/chat/zhipuai/", "Chat with Baichuan-192K": "https://python.langchain.com/v0.2/docs/integrations/chat/baichuan/", "QianfanChatEndpoint": "https://python.langchain.com/v0.2/docs/integrations/chat/baidu_qianfan_endpoint/", "Cohere": "https://python.langchain.com/v0.2/docs/integrations/llms/cohere/", "Eden AI": "https://python.langchain.com/v0.2/docs/integrations/chat/edenai/", "ErnieBotChat": "https://python.langchain.com/v0.2/docs/integrations/chat/ernie/", "ChatWatsonx": "https://python.langchain.com/v0.2/docs/integrations/chat/ibm_watsonx/", "vLLM Chat": "https://python.langchain.com/v0.2/docs/integrations/chat/vllm/", "Tencent Hunyuan": "https://python.langchain.com/v0.2/docs/integrations/chat/tencent_hunyuan/", "MiniMaxChat": "https://python.langchain.com/v0.2/docs/integrations/chat/minimax/", "Yuan2.0": "https://python.langchain.com/v0.2/docs/integrations/chat/yuan2/", "ChatTongyi": "https://python.langchain.com/v0.2/docs/integrations/chat/tongyi/", "PromptLayerChatOpenAI": "https://python.langchain.com/v0.2/docs/integrations/chat/promptlayer_chatopenai/", "SparkLLM Chat": "https://python.langchain.com/v0.2/docs/integrations/chat/sparkllm/", "MoonshotChat": "https://python.langchain.com/v0.2/docs/integrations/chat/moonshot/", "Dappier AI": "https://python.langchain.com/v0.2/docs/integrations/chat/dappier/", "Maritalk": "https://python.langchain.com/v0.2/docs/integrations/chat/maritalk/", "ChatPremAI": "https://python.langchain.com/v0.2/docs/integrations/chat/premai/", "ChatAnyscale": "https://python.langchain.com/v0.2/docs/integrations/chat/anyscale/", "ChatYandexGPT": "https://python.langchain.com/v0.2/docs/integrations/chat/yandex/", "ChatNVIDIA": "https://python.langchain.com/v0.2/docs/integrations/chat/nvidia_ai_endpoints/", "LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/callbacks/llmonitor/", "Context": "https://python.langchain.com/v0.2/docs/integrations/callbacks/context/", "Label Studio": "https://python.langchain.com/v0.2/docs/integrations/callbacks/labelstudio/", "PromptLayer": "https://python.langchain.com/v0.2/docs/integrations/callbacks/promptlayer/", "Trubrics": "https://python.langchain.com/v0.2/docs/integrations/callbacks/trubrics/", "Log10": "https://python.langchain.com/v0.2/docs/integrations/providers/log10/", "MLflow Deployments for LLMs": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow/", "MLflow AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_ai_gateway/", "Flyte": "https://python.langchain.com/v0.2/docs/integrations/providers/flyte/", "PremAI": 
"https://python.langchain.com/v0.2/docs/integrations/providers/premai/", "Arthur": "https://python.langchain.com/v0.2/docs/integrations/providers/arthur_tracking/", "Javelin AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/javelin_ai_gateway/", "Browserbase": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/browserbase/", "Google Cloud Vertex AI": "https://python.langchain.com/v0.2/docs/integrations/llms/google_vertex_ai_palm/", "Javelin AI Gateway Tutorial": "https://python.langchain.com/v0.2/docs/integrations/llms/javelin/", "Azure ML": "https://python.langchain.com/v0.2/docs/integrations/llms/azure_ml/", "Chat Bot Feedback Template": "https://python.langchain.com/v0.2/docs/templates/chat-bot-feedback/", "Build a Chatbot": "https://python.langchain.com/v0.2/docs/tutorials/chatbot/", "Conversational RAG": "https://python.langchain.com/v0.2/docs/tutorials/qa_chat_history/", "Build an Agent": "https://python.langchain.com/v0.2/docs/tutorials/agents/", "Build a Question/Answering system over SQL data": "https://python.langchain.com/v0.2/docs/tutorials/sql_qa/", "Build a Simple LLM Application with LCEL": "https://python.langchain.com/v0.2/docs/tutorials/llm_chain/"}, "RunnableMap": {"\ud83e\udd9c\ufe0f\ud83c\udfd3 LangServe": "https://python.langchain.com/v0.2/docs/langserve/"}, "RunnableLambda": {"\ud83e\udd9c\ufe0f\ud83c\udfd3 LangServe": "https://python.langchain.com/v0.2/docs/langserve/", "How to handle long text when doing extraction": "https://python.langchain.com/v0.2/docs/how_to/extraction_long_text/", "How to route between sub-chains": "https://python.langchain.com/v0.2/docs/how_to/routing/", "How to convert Runnables as Tools": "https://python.langchain.com/v0.2/docs/how_to/convert_runnable_to_tool/", "How to stream runnables": "https://python.langchain.com/v0.2/docs/how_to/streaming/", "How to retry when a parsing error occurs": "https://python.langchain.com/v0.2/docs/how_to/output_parser_retry/", "How to run custom functions": "https://python.langchain.com/v0.2/docs/how_to/functions/", "LangChain Expression Language Cheatsheet": "https://python.langchain.com/v0.2/docs/how_to/lcel_cheatsheet/", "How to select examples from a LangSmith dataset": "https://python.langchain.com/v0.2/docs/how_to/example_selectors_langsmith/", "How to dispatch custom callback events": "https://python.langchain.com/v0.2/docs/how_to/callbacks_custom_events/", "Upstash Ratelimit Callback": "https://python.langchain.com/v0.2/docs/integrations/callbacks/upstash_ratelimit/", "Vector stores and retrievers": "https://python.langchain.com/v0.2/docs/tutorials/retrievers/"}, "PromptTemplate": {"Conceptual guide": "https://python.langchain.com/v0.2/docs/concepts/", "# Example": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/map_rerank_docs_chain/", "# Legacy": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/llm_router_chain/", "How to better prompt when doing SQL question-answering": "https://python.langchain.com/v0.2/docs/how_to/sql_prompting/", "How to use output parsers to parse an LLM response into structured format": "https://python.langchain.com/v0.2/docs/how_to/output_parser_structured/", "How to route between sub-chains": "https://python.langchain.com/v0.2/docs/how_to/routing/", "How to select examples by n-gram overlap": "https://python.langchain.com/v0.2/docs/how_to/example_selectors_ngram/", "How to select examples by length": "https://python.langchain.com/v0.2/docs/how_to/example_selectors_length_based/", "How to use 
example selectors": "https://python.langchain.com/v0.2/docs/how_to/example_selectors/", "How to use few shot examples": "https://python.langchain.com/v0.2/docs/how_to/few_shot_examples/", "How to select examples by similarity": "https://python.langchain.com/v0.2/docs/how_to/example_selectors_similarity/", "How to parse XML output": "https://python.langchain.com/v0.2/docs/how_to/output_parser_xml/", "How to reorder retrieved results to mitigate the \"lost in the middle\" effect": "https://python.langchain.com/v0.2/docs/how_to/long_context_reorder/", "How to add fallbacks to a runnable": "https://python.langchain.com/v0.2/docs/how_to/fallbacks/", "Run models locally": "https://python.langchain.com/v0.2/docs/how_to/local_llms/", "How to configure runtime chain internals": "https://python.langchain.com/v0.2/docs/how_to/configure/", "How to retry when a parsing error occurs": "https://python.langchain.com/v0.2/docs/how_to/output_parser_retry/", "How to use the MultiQueryRetriever": "https://python.langchain.com/v0.2/docs/how_to/MultiQueryRetriever/", "How to best prompt for Graph-RAG": "https://python.langchain.com/v0.2/docs/how_to/graph_prompting/", "How to parse YAML output": "https://python.langchain.com/v0.2/docs/how_to/output_parser_yaml/", "How to compose prompts together": "https://python.langchain.com/v0.2/docs/how_to/prompts_composition/", "How to partially format prompt templates": "https://python.langchain.com/v0.2/docs/how_to/prompts_partial/", "How to parse JSON output": "https://python.langchain.com/v0.2/docs/how_to/output_parser_json/", "How to select examples by maximal marginal relevance (MMR)": "https://python.langchain.com/v0.2/docs/how_to/example_selectors_mmr/", "How to track token usage for LLMs": "https://python.langchain.com/v0.2/docs/how_to/llm_token_usage_tracking/", "Clarifai": "https://python.langchain.com/v0.2/docs/integrations/llms/clarifai/", "RePhraseQuery": "https://python.langchain.com/v0.2/docs/integrations/retrievers/re_phrase/", "Google Drive": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_drive/", "Milvus Hybrid Search Retriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/milvus_hybrid_search/", "Zapier Natural Language Actions": "https://python.langchain.com/v0.2/docs/integrations/tools/zapier/", "NVIDIA Riva: ASR and TTS": "https://python.langchain.com/v0.2/docs/integrations/tools/nvidia_riva/", "Reddit Search ": "https://python.langchain.com/v0.2/docs/integrations/tools/reddit_search/", "Dall-E Image Generator": "https://python.langchain.com/v0.2/docs/integrations/tools/dalle_image_generator/", "Mot\u00f6rhead": "https://python.langchain.com/v0.2/docs/integrations/memory/motorhead_memory/", "Context": "https://python.langchain.com/v0.2/docs/integrations/callbacks/context/", "SageMaker Tracking": "https://python.langchain.com/v0.2/docs/integrations/callbacks/sagemaker_tracking/", "Argilla": "https://python.langchain.com/v0.2/docs/integrations/callbacks/argilla/", "DSPy": "https://python.langchain.com/v0.2/docs/integrations/providers/dspy/", "Comet": "https://python.langchain.com/v0.2/docs/integrations/providers/comet_tracking/", "Aim": "https://python.langchain.com/v0.2/docs/integrations/providers/aim_tracking/", "Weights & Biases": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracking/", "MLflow AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_ai_gateway/", "Rebuff": "https://python.langchain.com/v0.2/docs/integrations/providers/rebuff/", 
"Prediction Guard": "https://python.langchain.com/v0.2/docs/integrations/llms/predictionguard/", "Shale Protocol": "https://python.langchain.com/v0.2/docs/integrations/providers/shaleprotocol/", "Flyte": "https://python.langchain.com/v0.2/docs/integrations/providers/flyte/", "Ray Serve": "https://python.langchain.com/v0.2/docs/integrations/providers/ray_serve/", "Javelin AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/javelin_ai_gateway/", "Identity-enabled RAG using PebbloRetrievalQA": "https://python.langchain.com/v0.2/docs/integrations/providers/pebblo/pebblo_retrieval_qa/", "SAP HANA Cloud Vector Engine": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sap_hanavector/", "Amazon Document DB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/documentdb/", "Google Cloud Vertex AI Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/google_cloud_vertexai_rerank/", "AirbyteLoader": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte/", "Memgraph": "https://python.langchain.com/v0.2/docs/integrations/graphs/memgraph/", "Apache AGE": "https://python.langchain.com/v0.2/docs/integrations/graphs/apache_age/", "Neo4j": "https://python.langchain.com/v0.2/docs/integrations/graphs/neo4j_cypher/", "Baseten": "https://python.langchain.com/v0.2/docs/integrations/llms/baseten/", "StochasticAI": "https://python.langchain.com/v0.2/docs/integrations/llms/stochasticai/", "Solar": "https://python.langchain.com/v0.2/docs/integrations/llms/solar/", "Bittensor": "https://python.langchain.com/v0.2/docs/integrations/llms/bittensor/", "IPEX-LLM": "https://python.langchain.com/v0.2/docs/integrations/llms/ipex_llm/", "Banana": "https://python.langchain.com/v0.2/docs/integrations/llms/banana/", "Alibaba Cloud PAI EAS": "https://python.langchain.com/v0.2/docs/integrations/llms/alibabacloud_pai_eas_endpoint/", "OpenLLM": "https://python.langchain.com/v0.2/docs/integrations/llms/openllm/", "SageMakerEndpoint": "https://python.langchain.com/v0.2/docs/integrations/llms/sagemaker/", "Fireworks": "https://python.langchain.com/v0.2/docs/integrations/llms/fireworks/", "OctoAI": "https://python.langchain.com/v0.2/docs/integrations/llms/octoai/", "Writer": "https://python.langchain.com/v0.2/docs/integrations/llms/writer/", "Modal": "https://python.langchain.com/v0.2/docs/integrations/llms/modal/", "TextGen": "https://python.langchain.com/v0.2/docs/integrations/llms/textgen/", "Xorbits Inference (Xinference)": "https://python.langchain.com/v0.2/docs/integrations/llms/xinference/", "Nebula (Symbl.ai)": "https://python.langchain.com/v0.2/docs/integrations/llms/symblai_nebula/", "DeepInfra": "https://python.langchain.com/v0.2/docs/integrations/llms/deepinfra/", "AnthropicLLM": "https://python.langchain.com/v0.2/docs/integrations/llms/anthropic/", "NLP Cloud": "https://python.langchain.com/v0.2/docs/integrations/llms/nlpcloud/", "GPT4All": "https://python.langchain.com/v0.2/docs/integrations/llms/gpt4all/", "ForefrontAI": "https://python.langchain.com/v0.2/docs/integrations/llms/forefrontai/", "MosaicML": "https://python.langchain.com/v0.2/docs/integrations/llms/mosaicml/", "Volc Engine Maas": "https://python.langchain.com/v0.2/docs/integrations/llms/volcengine_maas/", "CerebriumAI": "https://python.langchain.com/v0.2/docs/integrations/llms/cerebriumai/", "OpenAI": "https://python.langchain.com/v0.2/docs/integrations/llms/openai/", "Google Cloud Vertex AI": 
"https://python.langchain.com/v0.2/docs/integrations/llms/google_vertex_ai_palm/", "Predibase": "https://python.langchain.com/v0.2/docs/integrations/llms/predibase/", "GigaChat": "https://python.langchain.com/v0.2/docs/integrations/llms/gigachat/", "# Oracle Cloud Infrastructure Generative AI": "https://python.langchain.com/v0.2/docs/integrations/llms/oci_generative_ai/", "Llama.cpp": "https://python.langchain.com/v0.2/docs/integrations/llms/llamacpp/", "Hugging Face Local Pipelines": "https://python.langchain.com/v0.2/docs/integrations/llms/huggingface_pipelines/", "OpaquePrompts": "https://python.langchain.com/v0.2/docs/integrations/llms/opaqueprompts/", "Titan Takeoff": "https://python.langchain.com/v0.2/docs/integrations/llms/titan_takeoff/", "Aphrodite Engine": "https://python.langchain.com/v0.2/docs/integrations/llms/aphrodite/", "AI21LLM": "https://python.langchain.com/v0.2/docs/integrations/llms/ai21/", "Cohere": "https://python.langchain.com/v0.2/docs/integrations/llms/cohere/", "Eden AI": "https://python.langchain.com/v0.2/docs/integrations/llms/edenai/", "Javelin AI Gateway Tutorial": "https://python.langchain.com/v0.2/docs/integrations/llms/javelin/", "IBM watsonx.ai": "https://python.langchain.com/v0.2/docs/integrations/llms/ibm_watsonx/", "C Transformers": "https://python.langchain.com/v0.2/docs/integrations/llms/ctransformers/", "vLLM": "https://python.langchain.com/v0.2/docs/integrations/llms/vllm/", "Azure ML": "https://python.langchain.com/v0.2/docs/integrations/llms/azure_ml/", "Manifest": "https://python.langchain.com/v0.2/docs/integrations/llms/manifest/", "ExLlamaV2": "https://python.langchain.com/v0.2/docs/integrations/llms/exllamav2/", "Minimax": "https://python.langchain.com/v0.2/docs/integrations/llms/minimax/", "Tongyi Qwen": "https://python.langchain.com/v0.2/docs/integrations/llms/tongyi/", "Huggingface Endpoints": "https://python.langchain.com/v0.2/docs/integrations/llms/huggingface_endpoint/", "MLX Local Pipelines": "https://python.langchain.com/v0.2/docs/integrations/llms/mlx_pipelines/", "Runhouse": "https://python.langchain.com/v0.2/docs/integrations/llms/runhouse/", "Anyscale": "https://python.langchain.com/v0.2/docs/integrations/llms/anyscale/", "YandexGPT": "https://python.langchain.com/v0.2/docs/integrations/llms/yandex/", "GooseAI": "https://python.langchain.com/v0.2/docs/integrations/llms/gooseai/", "OpenLM": "https://python.langchain.com/v0.2/docs/integrations/llms/openlm/", "Aleph Alpha": "https://python.langchain.com/v0.2/docs/integrations/llms/aleph_alpha/", "Cloudflare Workers AI": "https://python.langchain.com/v0.2/docs/integrations/llms/cloudflare_workersai/", "CTranslate2": "https://python.langchain.com/v0.2/docs/integrations/llms/ctranslate2/", "Google AI": "https://python.langchain.com/v0.2/docs/integrations/llms/google_ai/", "PipelineAI": "https://python.langchain.com/v0.2/docs/integrations/llms/pipelineai/", "ChatGLM": "https://python.langchain.com/v0.2/docs/integrations/llms/chatglm/", "Gradient": "https://python.langchain.com/v0.2/docs/integrations/llms/gradient/", "Petals": "https://python.langchain.com/v0.2/docs/integrations/llms/petals/", "OpenVINO": "https://python.langchain.com/v0.2/docs/integrations/llms/openvino/", "Intel Weight-Only Quantization": "https://python.langchain.com/v0.2/docs/integrations/llms/weight_only_quantization/", "Replicate": "https://python.langchain.com/v0.2/docs/integrations/llms/replicate/", "Build a Retrieval Augmented Generation (RAG) App": "https://python.langchain.com/v0.2/docs/tutorials/rag/", "Build 
a Question/Answering system over SQL data": "https://python.langchain.com/v0.2/docs/tutorials/sql_qa/", "Generate Synthetic Data": "https://python.langchain.com/v0.2/docs/tutorials/data_generation/"}, "MessagesPlaceholder": {"Conceptual guide": "https://python.langchain.com/v0.2/docs/concepts/", "How to add a semantic layer over graph database": "https://python.langchain.com/v0.2/docs/how_to/graph_semantic/", "How to handle long text when doing extraction": "https://python.langchain.com/v0.2/docs/how_to/extraction_long_text/", "How to use reference examples when doing extraction": "https://python.langchain.com/v0.2/docs/how_to/extraction_examples/", "How to do question answering over CSVs": "https://python.langchain.com/v0.2/docs/how_to/sql_csv/", "How to stream results from your RAG application": "https://python.langchain.com/v0.2/docs/how_to/qa_streaming/", "How to add chat history": "https://python.langchain.com/v0.2/docs/how_to/qa_chat_history_how_to/", "How to add message history": "https://python.langchain.com/v0.2/docs/how_to/message_history/", "How to add retrieval to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_retrieval/", "How to get a RAG application to add citations": "https://python.langchain.com/v0.2/docs/how_to/qa_citations/", "How to add examples to the prompt for query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_few_shot/", "Redis": "https://python.langchain.com/v0.2/docs/integrations/memory/redis_chat_message_history/", "Google SQL for MySQL": "https://python.langchain.com/v0.2/docs/integrations/memory/google_sql_mysql/", "Google AlloyDB for PostgreSQL": "https://python.langchain.com/v0.2/docs/integrations/memory/google_alloydb/", "ZepCloudChatMessageHistory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_cloud_chat_message_history/", "AWS DynamoDB": "https://python.langchain.com/v0.2/docs/integrations/memory/aws_dynamodb/", "Couchbase": "https://python.langchain.com/v0.2/docs/integrations/memory/couchbase_chat_message_history/", "MongoDB": "https://python.langchain.com/v0.2/docs/integrations/memory/mongodb_chat_message_history/", "SQL (SQLAlchemy)": "https://python.langchain.com/v0.2/docs/integrations/memory/sql_chat_message_history/", "Streamlit": "https://python.langchain.com/v0.2/docs/integrations/memory/streamlit_chat_message_history/", "Google El Carro Oracle": "https://python.langchain.com/v0.2/docs/integrations/memory/google_el_carro/", "SQLite": "https://python.langchain.com/v0.2/docs/integrations/memory/sqlite/", "Google SQL for PostgreSQL": "https://python.langchain.com/v0.2/docs/integrations/memory/google_sql_pg/", "Google SQL for SQL Server": "https://python.langchain.com/v0.2/docs/integrations/memory/google_sql_mssql/", "TiDB": "https://python.langchain.com/v0.2/docs/integrations/memory/tidb_chat_message_history/", "Llama2Chat": "https://python.langchain.com/v0.2/docs/integrations/chat/llama2_chat/", "Build an Extraction Chain": "https://python.langchain.com/v0.2/docs/tutorials/extraction/", "Build a Chatbot": "https://python.langchain.com/v0.2/docs/tutorials/chatbot/", "Conversational RAG": "https://python.langchain.com/v0.2/docs/tutorials/qa_chat_history/"}, "CSVLoader": {"Conceptual guide": "https://python.langchain.com/v0.2/docs/concepts/", "How to load CSVs": "https://python.langchain.com/v0.2/docs/how_to/document_loader_csv/", "ChatGPT plugin": "https://python.langchain.com/v0.2/docs/integrations/retrievers/chatgpt-plugin/", "Aerospike": 
"https://python.langchain.com/v0.2/docs/integrations/vectorstores/aerospike/", "CSV": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/csv/", "Document loaders": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/index/", "Pebblo Safe DocumentLoader": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/pebblo/"}, "StrOutputParser": {"Conceptual guide": "https://python.langchain.com/v0.2/docs/concepts/", "# Legacy": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/llm_chain/", "# Basic example (short documents)": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/map_reduce_chain/", "# Example": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/refine_docs_chain/", "Load docs": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/retrieval_qa/", "How to add values to a chain's state": "https://python.langchain.com/v0.2/docs/how_to/assign/", "How to route between sub-chains": "https://python.langchain.com/v0.2/docs/how_to/routing/", "How to do per-user retrieval": "https://python.langchain.com/v0.2/docs/how_to/qa_per_user/", "How to inspect runnables": "https://python.langchain.com/v0.2/docs/how_to/inspect/", "How to summarize text through iterative refinement": "https://python.langchain.com/v0.2/docs/how_to/summarize_refine/", "How to create tools": "https://python.langchain.com/v0.2/docs/how_to/custom_tools/", "How to add fallbacks to a runnable": "https://python.langchain.com/v0.2/docs/how_to/fallbacks/", "How to map values to a graph database": "https://python.langchain.com/v0.2/docs/how_to/graph_mapping/", "How to do question answering over CSVs": "https://python.langchain.com/v0.2/docs/how_to/sql_csv/", "How to get your RAG application to return sources": "https://python.langchain.com/v0.2/docs/how_to/qa_sources/", "How to summarize text through parallelization": "https://python.langchain.com/v0.2/docs/how_to/summarize_map_reduce/", "How to add default invocation args to a Runnable": "https://python.langchain.com/v0.2/docs/how_to/binding/", "How to convert Runnables as Tools": "https://python.langchain.com/v0.2/docs/how_to/convert_runnable_to_tool/", "How to stream events from a tool": "https://python.langchain.com/v0.2/docs/how_to/tool_stream_events/", "How to create a dynamic (self-constructing) chain": "https://python.langchain.com/v0.2/docs/how_to/dynamic_chain/", "How to stream runnables": "https://python.langchain.com/v0.2/docs/how_to/streaming/", "How to invoke runnables in parallel": "https://python.langchain.com/v0.2/docs/how_to/parallel/", "How to pass through arguments from one step to the next": "https://python.langchain.com/v0.2/docs/how_to/passthrough/", "How to retrieve using multiple vectors per document": "https://python.langchain.com/v0.2/docs/how_to/multi_vector/", "How to add chat history": "https://python.langchain.com/v0.2/docs/how_to/qa_chat_history_how_to/", "How to add retrieval to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_retrieval/", "How to get a RAG application to add citations": "https://python.langchain.com/v0.2/docs/how_to/qa_citations/", "How to run custom functions": "https://python.langchain.com/v0.2/docs/how_to/functions/", "How to chain runnables": "https://python.langchain.com/v0.2/docs/how_to/sequence/", "Hybrid Search": "https://python.langchain.com/v0.2/docs/how_to/hybrid/", "How to do query validation as part of SQL question-answering": 
"https://python.langchain.com/v0.2/docs/how_to/sql_query_checking/", "Facebook Messenger": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/facebook/", "iMessage": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/imessage/", "NVIDIA NIMs ": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/nvidia_ai_endpoints/", "AzureAISearchRetriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/azure_ai_search/", "You.com": "https://python.langchain.com/v0.2/docs/integrations/retrievers/you-retriever/", "Fleet AI Context": "https://python.langchain.com/v0.2/docs/integrations/retrievers/fleet_context/", "AskNews": "https://python.langchain.com/v0.2/docs/integrations/retrievers/asknews/", "WikipediaRetriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/wikipedia/", "TavilySearchAPIRetriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/tavily/", "ArxivRetriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/arxiv/", "ElasticsearchRetriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/elasticsearch_retriever/", "Milvus Hybrid Search Retriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/milvus_hybrid_search/", "Google Vertex AI Search": "https://python.langchain.com/v0.2/docs/integrations/retrievers/google_vertex_ai_search/", "ZepCloudChatMessageHistory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_cloud_chat_message_history/", "ChatOllama": "https://python.langchain.com/v0.2/docs/integrations/chat/ollama/", "Maritalk": "https://python.langchain.com/v0.2/docs/integrations/chat/maritalk/", "ChatNVIDIA": "https://python.langchain.com/v0.2/docs/integrations/chat/nvidia_ai_endpoints/", "Fiddler": "https://python.langchain.com/v0.2/docs/integrations/callbacks/fiddler/", "UpTrain": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "DSPy": "https://python.langchain.com/v0.2/docs/integrations/providers/dspy/", "MLflow": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_tracking/", "Shale Protocol": "https://python.langchain.com/v0.2/docs/integrations/providers/shaleprotocol/", "Weaviate": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/weaviate/", "Jaguar Vector Database": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/jaguar/", "Apache Cassandra": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/cassandra/", "Volc Engine Maas": "https://python.langchain.com/v0.2/docs/integrations/llms/volcengine_maas/", "OpaquePrompts": "https://python.langchain.com/v0.2/docs/integrations/llms/opaqueprompts/", "AI21LLM": "https://python.langchain.com/v0.2/docs/integrations/llms/ai21/", "PipelineAI": "https://python.langchain.com/v0.2/docs/integrations/llms/pipelineai/", "Build a Retrieval Augmented Generation (RAG) App": "https://python.langchain.com/v0.2/docs/tutorials/rag/", "Build a Local RAG Application": "https://python.langchain.com/v0.2/docs/tutorials/local_rag/", "Summarize Text": "https://python.langchain.com/v0.2/docs/tutorials/summarization/", "Build a Question/Answering system over SQL data": "https://python.langchain.com/v0.2/docs/tutorials/sql_qa/", "Build a Simple LLM Application with LCEL": "https://python.langchain.com/v0.2/docs/tutorials/llm_chain/"}, "SimpleJsonOutputParser": {"Conceptual guide": "https://python.langchain.com/v0.2/docs/concepts/", "How to use output parsers to parse an LLM response into structured format": 
"https://python.langchain.com/v0.2/docs/how_to/output_parser_structured/"}, "BaseChatModel": {"Contribute Integrations": "https://python.langchain.com/v0.2/docs/contributing/integrations/", "How to create a custom chat model class": "https://python.langchain.com/v0.2/docs/how_to/custom_chat_model/"}, "deprecated": {"Contribute Integrations": "https://python.langchain.com/v0.2/docs/contributing/integrations/"}, "UnstructuredMarkdownLoader": {"langchain": "https://python.langchain.com/v0.2/docs/changes/changelog/langchain/", "How to load Markdown": "https://python.langchain.com/v0.2/docs/how_to/document_loader_markdown/", "Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "Apache Doris": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/apache_doris/", "StarRocks": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/starrocks/", "UnstructuredMarkdownLoader": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/unstructured_markdown/"}, "Document": {"# Example": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/map_rerank_docs_chain/", "# Basic example (short documents)": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/map_reduce_chain/", "How to handle long text when doing extraction": "https://python.langchain.com/v0.2/docs/how_to/extraction_long_text/", "How to create a custom Document Loader": "https://python.langchain.com/v0.2/docs/how_to/document_loader_custom/", "How to summarize text through iterative refinement": "https://python.langchain.com/v0.2/docs/how_to/summarize_refine/", "How to summarize text through parallelization": "https://python.langchain.com/v0.2/docs/how_to/summarize_map_reduce/", "How to use the LangChain indexing API": "https://python.langchain.com/v0.2/docs/how_to/indexing/", "How to convert Runnables as Tools": "https://python.langchain.com/v0.2/docs/how_to/convert_runnable_to_tool/", "How to retrieve using multiple vectors per document": "https://python.langchain.com/v0.2/docs/how_to/multi_vector/", "How to create a custom Retriever": "https://python.langchain.com/v0.2/docs/how_to/custom_retriever/", "How to construct knowledge graphs": "https://python.langchain.com/v0.2/docs/how_to/graph_constructing/", "How to use a time-weighted vector store retriever": "https://python.langchain.com/v0.2/docs/how_to/time_weighted_vectorstore/", "How to get a RAG application to add citations": "https://python.langchain.com/v0.2/docs/how_to/qa_citations/", "How to load Markdown": "https://python.langchain.com/v0.2/docs/how_to/document_loader_markdown/", "How to do \"self-querying\" retrieval": "https://python.langchain.com/v0.2/docs/how_to/self_query/", "How to summarize text in a single LLM call": "https://python.langchain.com/v0.2/docs/how_to/summarize_stuff/", "How to add scores to retriever results": "https://python.langchain.com/v0.2/docs/how_to/add_scores_retriever/", "Model caches": "https://python.langchain.com/v0.2/docs/integrations/llm_caching/", "Oracle AI Vector Search: Generate Embeddings": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/oracleai/", "Kinetica Vectorstore based Retriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/kinetica/", "Fleet AI Context": "https://python.langchain.com/v0.2/docs/integrations/retrievers/fleet_context/", "ChatGPT plugin": "https://python.langchain.com/v0.2/docs/integrations/retrievers/chatgpt-plugin/", "Cohere RAG": 
"https://python.langchain.com/v0.2/docs/integrations/retrievers/cohere/", "Weaviate Hybrid Search": "https://python.langchain.com/v0.2/docs/integrations/retrievers/weaviate-hybrid/", "BM25": "https://python.langchain.com/v0.2/docs/integrations/retrievers/bm25/", "Qdrant Sparse Vector": "https://python.langchain.com/v0.2/docs/integrations/retrievers/qdrant-sparse/", "ElasticsearchRetriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/elasticsearch_retriever/", "TF-IDF": "https://python.langchain.com/v0.2/docs/integrations/retrievers/tf_idf/", "Milvus": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/milvus/", "PGVector (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/pgvector_self_query/", "Weaviate": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/weaviate_self_query/", "Vectara self-querying ": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/vectara_self_query/", "SAP HANA Cloud Vector Engine": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sap_hanavector/", "DashVector": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/dashvector/", "Databricks Vector Search": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/databricks_vector_search/", "DingoDB": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/dingo/", "OpenSearch": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/opensearch_self_query/", "Elasticsearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/elasticsearch/", "Chroma": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/chroma/", "Tencent Cloud VectorDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tencentvectordb/", "Timescale Vector (Postgres) ": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/timescalevector_self_query/", "Astra DB (Cassandra)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/astradb/", "Pinecone": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pinecone/", "Supabase (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/supabase_self_query/", "Redis": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/redis/", "MyScale": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/myscale_self_query/", "Deep Lake": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/activeloop_deeplake_self_query/", "MongoDB Atlas": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/mongodb_atlas/", "Qdrant": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/qdrant/", "Oracle AI Vector Search: Generate Summary": "https://python.langchain.com/v0.2/docs/integrations/tools/oracleai/", "Cohere": "https://python.langchain.com/v0.2/docs/integrations/providers/cohere/", "Identity-enabled RAG using PebbloRetrievalQA": "https://python.langchain.com/v0.2/docs/integrations/providers/pebblo/pebblo_retrieval_qa/", "Kinetica Vectorstore API": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/kinetica/", "Yellowbrick": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/yellowbrick/", "PGVector": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pgvector/", "SingleStoreDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/singlestoredb/", 
"Annoy": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/annoy/", "Couchbase ": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/couchbase/", "Oracle AI Vector Search: Vector Store": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/oracle/", "Neo4j Vector Index": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/neo4jvector/", "Lantern": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/lantern/", "Google Firestore (Native Mode)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_firestore/", "ClickHouse": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/clickhouse/", "Astra DB Vector Store": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/astradb/", "Faiss (Async)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/faiss_async/", "Apache Cassandra": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/cassandra/", "PGVecto.rs": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pgvecto_rs/", "Postgres Embedding": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pgembedding/", "Timescale Vector (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/timescalevector/", "Faiss": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/faiss/", "Nuclia": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/nuclia_transformer/", "AI21SemanticTextSplitter": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/ai21_semantic_text_splitter/", "Google Cloud Vertex AI Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/google_cloud_vertexai_rerank/", "OpenAI metadata tagger": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/openai_metadata_tagger/", "Doctran: extract properties": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/doctran_extract_properties/", "Google Translate": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/google_translate/", "Doctran: interrogate documents": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/doctran_interrogate_document/", "Doctran: language translation": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/doctran_translate_document/", "TensorFlow Datasets": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/tensorflow_datasets/", "Google Cloud SQL for MySQL": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_cloud_sql_mysql/", "Airbyte Salesforce (Deprecated)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_salesforce/", "Airbyte CDK (Deprecated)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_cdk/", "Airbyte Stripe (Deprecated)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_stripe/", "Copy Paste": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/copypaste/", "Airbyte Typeform (Deprecated)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_typeform/", "Apify Dataset": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/apify_dataset/", "Google Firestore in Datastore Mode": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_datastore/", "Oracle AI Vector Search: Document Processing": 
"https://python.langchain.com/v0.2/docs/integrations/document_loaders/oracleai/", "Docugami": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/docugami/", "Airbyte Hubspot (Deprecated)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_hubspot/", "Airbyte Gong (Deprecated)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_gong/", "Google Memorystore for Redis": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_memorystore_redis/", "Google Bigtable": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_bigtable/", "Google Cloud SQL for SQL server": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_cloud_sql_mssql/", "Google El Carro for Oracle Workloads": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_el_carro/", "Airbyte Shopify (Deprecated)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_shopify/", "Airbyte Zendesk Support (Deprecated)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_zendesk_support/", "Google Spanner": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_spanner/", "PDFMiner": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/pdfminer/", "Azure Cosmos DB for Apache Gremlin": "https://python.langchain.com/v0.2/docs/integrations/graphs/azure_cosmosdb_gremlin/", "SageMakerEndpoint": "https://python.langchain.com/v0.2/docs/integrations/llms/sagemaker/", "self-query-qdrant": "https://python.langchain.com/v0.2/docs/templates/self-query-qdrant/", "Summarize Text": "https://python.langchain.com/v0.2/docs/tutorials/summarization/", "Build a Query Analysis System": "https://python.langchain.com/v0.2/docs/tutorials/query_analysis/", "Vector stores and retrievers": "https://python.langchain.com/v0.2/docs/tutorials/retrievers/"}, "LLMChain": {"# Example": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/map_rerank_docs_chain/", "# Legacy": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/llm_chain/", "# Basic example (short documents)": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/map_reduce_chain/", "Clarifai": "https://python.langchain.com/v0.2/docs/integrations/llms/clarifai/", "RePhraseQuery": "https://python.langchain.com/v0.2/docs/integrations/retrievers/re_phrase/", "Memorize": "https://python.langchain.com/v0.2/docs/integrations/tools/memorize/", "Zapier Natural Language Actions": "https://python.langchain.com/v0.2/docs/integrations/tools/zapier/", "Reddit Search ": "https://python.langchain.com/v0.2/docs/integrations/tools/reddit_search/", "Dall-E Image Generator": "https://python.langchain.com/v0.2/docs/integrations/tools/dalle_image_generator/", "Mot\u00f6rhead": "https://python.langchain.com/v0.2/docs/integrations/memory/motorhead_memory/", "Llama2Chat": "https://python.langchain.com/v0.2/docs/integrations/chat/llama2_chat/", "Context": "https://python.langchain.com/v0.2/docs/integrations/callbacks/context/", "SageMaker Tracking": "https://python.langchain.com/v0.2/docs/integrations/callbacks/sagemaker_tracking/", "Argilla": "https://python.langchain.com/v0.2/docs/integrations/callbacks/argilla/", "Comet": "https://python.langchain.com/v0.2/docs/integrations/providers/comet_tracking/", "Aim": "https://python.langchain.com/v0.2/docs/integrations/providers/aim_tracking/", "Weights & Biases": 
"https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracking/", "MLflow Deployments for LLMs": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow/", "MLflow AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_ai_gateway/", "Rebuff": "https://python.langchain.com/v0.2/docs/integrations/providers/rebuff/", "Prediction Guard": "https://python.langchain.com/v0.2/docs/integrations/llms/predictionguard/", "Flyte": "https://python.langchain.com/v0.2/docs/integrations/providers/flyte/", "Ray Serve": "https://python.langchain.com/v0.2/docs/integrations/providers/ray_serve/", "Javelin AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/javelin_ai_gateway/", "Yellowbrick": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/yellowbrick/", "Google Cloud Vertex AI Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/google_cloud_vertexai_rerank/", "Baseten": "https://python.langchain.com/v0.2/docs/integrations/llms/baseten/", "StochasticAI": "https://python.langchain.com/v0.2/docs/integrations/llms/stochasticai/", "Solar": "https://python.langchain.com/v0.2/docs/integrations/llms/solar/", "Bittensor": "https://python.langchain.com/v0.2/docs/integrations/llms/bittensor/", "IPEX-LLM": "https://python.langchain.com/v0.2/docs/integrations/llms/ipex_llm/", "Banana": "https://python.langchain.com/v0.2/docs/integrations/llms/banana/", "Alibaba Cloud PAI EAS": "https://python.langchain.com/v0.2/docs/integrations/llms/alibabacloud_pai_eas_endpoint/", "OpenLLM": "https://python.langchain.com/v0.2/docs/integrations/llms/openllm/", "OctoAI": "https://python.langchain.com/v0.2/docs/integrations/llms/octoai/", "Writer": "https://python.langchain.com/v0.2/docs/integrations/llms/writer/", "Modal": "https://python.langchain.com/v0.2/docs/integrations/llms/modal/", "TextGen": "https://python.langchain.com/v0.2/docs/integrations/llms/textgen/", "Xorbits Inference (Xinference)": "https://python.langchain.com/v0.2/docs/integrations/llms/xinference/", "Nebula (Symbl.ai)": "https://python.langchain.com/v0.2/docs/integrations/llms/symblai_nebula/", "DeepInfra": "https://python.langchain.com/v0.2/docs/integrations/llms/deepinfra/", "NLP Cloud": "https://python.langchain.com/v0.2/docs/integrations/llms/nlpcloud/", "ForefrontAI": "https://python.langchain.com/v0.2/docs/integrations/llms/forefrontai/", "MosaicML": "https://python.langchain.com/v0.2/docs/integrations/llms/mosaicml/", "CerebriumAI": "https://python.langchain.com/v0.2/docs/integrations/llms/cerebriumai/", "Predibase": "https://python.langchain.com/v0.2/docs/integrations/llms/predibase/", "GigaChat": "https://python.langchain.com/v0.2/docs/integrations/llms/gigachat/", "OpaquePrompts": "https://python.langchain.com/v0.2/docs/integrations/llms/opaqueprompts/", "Aphrodite Engine": "https://python.langchain.com/v0.2/docs/integrations/llms/aphrodite/", "Eden AI": "https://python.langchain.com/v0.2/docs/integrations/llms/edenai/", "Javelin AI Gateway Tutorial": "https://python.langchain.com/v0.2/docs/integrations/llms/javelin/", "C Transformers": "https://python.langchain.com/v0.2/docs/integrations/llms/ctransformers/", "vLLM": "https://python.langchain.com/v0.2/docs/integrations/llms/vllm/", "Azure ML": "https://python.langchain.com/v0.2/docs/integrations/llms/azure_ml/", "Minimax": "https://python.langchain.com/v0.2/docs/integrations/llms/minimax/", "Yuan2.0": "https://python.langchain.com/v0.2/docs/integrations/llms/yuan2/", 
"Huggingface Endpoints": "https://python.langchain.com/v0.2/docs/integrations/llms/huggingface_endpoint/", "Runhouse": "https://python.langchain.com/v0.2/docs/integrations/llms/runhouse/", "Anyscale": "https://python.langchain.com/v0.2/docs/integrations/llms/anyscale/", "YandexGPT": "https://python.langchain.com/v0.2/docs/integrations/llms/yandex/", "GooseAI": "https://python.langchain.com/v0.2/docs/integrations/llms/gooseai/", "OpenLM": "https://python.langchain.com/v0.2/docs/integrations/llms/openlm/", "Cloudflare Workers AI": "https://python.langchain.com/v0.2/docs/integrations/llms/cloudflare_workersai/", "CTranslate2": "https://python.langchain.com/v0.2/docs/integrations/llms/ctranslate2/", "ChatGLM": "https://python.langchain.com/v0.2/docs/integrations/llms/chatglm/", "Gradient": "https://python.langchain.com/v0.2/docs/integrations/llms/gradient/", "Petals": "https://python.langchain.com/v0.2/docs/integrations/llms/petals/", "Replicate": "https://python.langchain.com/v0.2/docs/integrations/llms/replicate/", "Summarize Text": "https://python.langchain.com/v0.2/docs/tutorials/summarization/"}, "StuffDocumentsChain": {"# Example": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/stuff_docs_chain/", "# Basic example (short documents)": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/map_reduce_chain/"}, "create_stuff_documents_chain": {"# Example": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/stuff_docs_chain/", "Load docs": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/retrieval_qa/", "How to reorder retrieved results to mitigate the \"lost in the middle\" effect": "https://python.langchain.com/v0.2/docs/how_to/long_context_reorder/", "How to stream results from your RAG application": "https://python.langchain.com/v0.2/docs/how_to/qa_streaming/", "How to get your RAG application to return sources": "https://python.langchain.com/v0.2/docs/how_to/qa_sources/", "How to add chat history": "https://python.langchain.com/v0.2/docs/how_to/qa_chat_history_how_to/", "How to add retrieval to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_retrieval/", "How to summarize text in a single LLM call": "https://python.langchain.com/v0.2/docs/how_to/summarize_stuff/", "RAGatouille": "https://python.langchain.com/v0.2/docs/integrations/retrievers/ragatouille/", "ApertureDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/aperturedb/", "Jina Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/jina_rerank/", "Image captions": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/image_captions/", "Build a Retrieval Augmented Generation (RAG) App": "https://python.langchain.com/v0.2/docs/tutorials/rag/", "Summarize Text": "https://python.langchain.com/v0.2/docs/tutorials/summarization/", "Conversational RAG": "https://python.langchain.com/v0.2/docs/tutorials/qa_chat_history/", "Build a PDF ingestion and Question/Answering system": "https://python.langchain.com/v0.2/docs/tutorials/pdf_qa/"}, "LLMMathChain": {"# Legacy": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/llm_math_chain/"}, "BaseMessage": {"# Legacy": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/llm_math_chain/", "How to trim messages": "https://python.langchain.com/v0.2/docs/how_to/trim_messages/", "How to use reference examples when doing extraction": "https://python.langchain.com/v0.2/docs/how_to/extraction_examples/", "How to propagate 
callbacks constructor": "https://python.langchain.com/v0.2/docs/how_to/callbacks_constructor/", "How to attach callbacks to a runnable": "https://python.langchain.com/v0.2/docs/how_to/callbacks_attach/", "How to create a custom chat model class": "https://python.langchain.com/v0.2/docs/how_to/custom_chat_model/", "How to pass callbacks in at runtime": "https://python.langchain.com/v0.2/docs/how_to/callbacks_runtime/", "How to add examples to the prompt for query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_few_shot/", "WeChat": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/wechat/", "Discord": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/discord/", "Chat Bot Feedback Template": "https://python.langchain.com/v0.2/docs/templates/chat-bot-feedback/"}, "RunnableConfig": {"# Legacy": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/multi_prompt_chain/", "# Example": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/refine_docs_chain/", "How to access the RunnableConfig from a tool": "https://python.langchain.com/v0.2/docs/how_to/tool_configure/", "How to summarize text through iterative refinement": "https://python.langchain.com/v0.2/docs/how_to/summarize_refine/", "How to handle tool errors": "https://python.langchain.com/v0.2/docs/how_to/tools_error/", "How to stream events from a tool": "https://python.langchain.com/v0.2/docs/how_to/tool_stream_events/", "How to run custom functions": "https://python.langchain.com/v0.2/docs/how_to/functions/", "How to add ad-hoc tool calling capability to LLMs and Chat Models": "https://python.langchain.com/v0.2/docs/how_to/tools_prompting/", "LangChain Expression Language Cheatsheet": "https://python.langchain.com/v0.2/docs/how_to/lcel_cheatsheet/", "How to dispatch custom callback events": "https://python.langchain.com/v0.2/docs/how_to/callbacks_custom_events/", "How to pass runtime secrets to runnables": "https://python.langchain.com/v0.2/docs/how_to/runnable_runtime_secrets/", "Tavily Search": "https://python.langchain.com/v0.2/docs/integrations/tools/tavily_search/"}, "tool": {"# Legacy": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/llm_math_chain/", "How to disable parallel tool calling": "https://python.langchain.com/v0.2/docs/how_to/tool_calling_parallel/", "How to use tools in a chain": "https://python.langchain.com/v0.2/docs/how_to/tools_chain/", "How to access the RunnableConfig from a tool": "https://python.langchain.com/v0.2/docs/how_to/tool_configure/", "How to do tool/function calling": "https://python.langchain.com/v0.2/docs/how_to/function_calling/", "How to pass run time values to tools": "https://python.langchain.com/v0.2/docs/how_to/tool_runtime/", "How to add a human-in-the-loop for tools": "https://python.langchain.com/v0.2/docs/how_to/tools_human/", "How to create tools": "https://python.langchain.com/v0.2/docs/how_to/custom_tools/", "How to pass multimodal data directly to models": "https://python.langchain.com/v0.2/docs/how_to/multimodal_inputs/", "How to force models to call a tool": "https://python.langchain.com/v0.2/docs/how_to/tool_choice/", "How to handle tool errors": "https://python.langchain.com/v0.2/docs/how_to/tools_error/", "How to stream events from a tool": "https://python.langchain.com/v0.2/docs/how_to/tool_stream_events/", "How to stream runnables": "https://python.langchain.com/v0.2/docs/how_to/streaming/", "How to pass tool outputs to chat models": 
"https://python.langchain.com/v0.2/docs/how_to/tool_results_pass_to_model/", "How to add ad-hoc tool calling capability to LLMs and Chat Models": "https://python.langchain.com/v0.2/docs/how_to/tools_prompting/", "How to return artifacts from a tool": "https://python.langchain.com/v0.2/docs/how_to/tool_artifacts/", "How to migrate from legacy LangChain agents to LangGraph": "https://python.langchain.com/v0.2/docs/how_to/migrate_agent/", "How to stream tool calls": "https://python.langchain.com/v0.2/docs/how_to/tool_streaming/", "How to pass runtime secrets to runnables": "https://python.langchain.com/v0.2/docs/how_to/runnable_runtime_secrets/", "How to use few-shot prompting with tool calling": "https://python.langchain.com/v0.2/docs/how_to/tools_few_shot/", "FinancialDatasets Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/financial_datasets/", "Exa Search": "https://python.langchain.com/v0.2/docs/integrations/tools/exa_search/", "DeepInfra": "https://python.langchain.com/v0.2/docs/integrations/chat/deepinfra/", "ChatOllama": "https://python.langchain.com/v0.2/docs/integrations/chat/ollama/", "Llama.cpp": "https://python.langchain.com/v0.2/docs/integrations/chat/llamacpp/", "Cohere": "https://python.langchain.com/v0.2/docs/integrations/providers/cohere/", "Eden AI": "https://python.langchain.com/v0.2/docs/integrations/chat/edenai/", "ChatTongyi": "https://python.langchain.com/v0.2/docs/integrations/chat/tongyi/", "ChatPremAI": "https://python.langchain.com/v0.2/docs/integrations/chat/premai/", "ChatNVIDIA": "https://python.langchain.com/v0.2/docs/integrations/chat/nvidia_ai_endpoints/", "LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/callbacks/llmonitor/", "PremAI": "https://python.langchain.com/v0.2/docs/integrations/providers/premai/", "Log, Trace, and Monitor": "https://python.langchain.com/v0.2/docs/integrations/providers/portkey/logging_tracing_portkey/", "Portkey": "https://python.langchain.com/v0.2/docs/integrations/providers/portkey/index/", "JSONFormer": "https://python.langchain.com/v0.2/docs/integrations/llms/jsonformer_experimental/"}, "MultiPromptChain": {"# Legacy": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/multi_prompt_chain/"}, "ConversationChain": {"# Legacy": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/conversation_chain/"}, "ConversationBufferMemory": {"# Legacy": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/conversation_chain/", "Memorize": "https://python.langchain.com/v0.2/docs/integrations/tools/memorize/", "Gradio": "https://python.langchain.com/v0.2/docs/integrations/tools/gradio_tools/", "SceneXplain": "https://python.langchain.com/v0.2/docs/integrations/tools/sceneXplain/", "Reddit Search ": "https://python.langchain.com/v0.2/docs/integrations/tools/reddit_search/", "Xata": "https://python.langchain.com/v0.2/docs/integrations/memory/xata_chat_message_history/", "Llama2Chat": "https://python.langchain.com/v0.2/docs/integrations/chat/llama2_chat/", "SAP HANA Cloud Vector Engine": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sap_hanavector/", "Bittensor": "https://python.langchain.com/v0.2/docs/integrations/llms/bittensor/"}, "InMemoryChatMessageHistory": {"# Legacy": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/conversation_chain/", "How to trim messages": "https://python.langchain.com/v0.2/docs/how_to/trim_messages/", "How to migrate from legacy LangChain agents to LangGraph": 
"https://python.langchain.com/v0.2/docs/how_to/migrate_agent/", "ChatNVIDIA": "https://python.langchain.com/v0.2/docs/integrations/chat/nvidia_ai_endpoints/", "Build a Chatbot": "https://python.langchain.com/v0.2/docs/tutorials/chatbot/"}, "RunnableWithMessageHistory": {"# Legacy": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/conversation_chain/", "Build an Agent with AgentExecutor (Legacy)": "https://python.langchain.com/v0.2/docs/how_to/agent_executor/", "How to trim messages": "https://python.langchain.com/v0.2/docs/how_to/trim_messages/", "How to add tools to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_tools/", "How to add chat history": "https://python.langchain.com/v0.2/docs/how_to/qa_chat_history_how_to/", "How to add message history": "https://python.langchain.com/v0.2/docs/how_to/message_history/", "How to add memory to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_memory/", "How to migrate from legacy LangChain agents to LangGraph": "https://python.langchain.com/v0.2/docs/how_to/migrate_agent/", "Redis": "https://python.langchain.com/v0.2/docs/integrations/memory/redis_chat_message_history/", "Google SQL for MySQL": "https://python.langchain.com/v0.2/docs/integrations/memory/google_sql_mysql/", "Google AlloyDB for PostgreSQL": "https://python.langchain.com/v0.2/docs/integrations/memory/google_alloydb/", "ZepCloudChatMessageHistory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_cloud_chat_message_history/", "AWS DynamoDB": "https://python.langchain.com/v0.2/docs/integrations/memory/aws_dynamodb/", "Couchbase": "https://python.langchain.com/v0.2/docs/integrations/memory/couchbase_chat_message_history/", "MongoDB": "https://python.langchain.com/v0.2/docs/integrations/memory/mongodb_chat_message_history/", "SQL (SQLAlchemy)": "https://python.langchain.com/v0.2/docs/integrations/memory/sql_chat_message_history/", "Streamlit": "https://python.langchain.com/v0.2/docs/integrations/memory/streamlit_chat_message_history/", "Google El Carro Oracle": "https://python.langchain.com/v0.2/docs/integrations/memory/google_el_carro/", "SQLite": "https://python.langchain.com/v0.2/docs/integrations/memory/sqlite/", "Google SQL for PostgreSQL": "https://python.langchain.com/v0.2/docs/integrations/memory/google_sql_pg/", "Google SQL for SQL Server": "https://python.langchain.com/v0.2/docs/integrations/memory/google_sql_mssql/", "TiDB": "https://python.langchain.com/v0.2/docs/integrations/memory/tidb_chat_message_history/", "ChatNVIDIA": "https://python.langchain.com/v0.2/docs/integrations/chat/nvidia_ai_endpoints/", "Build a Chatbot": "https://python.langchain.com/v0.2/docs/tutorials/chatbot/", "Conversational RAG": "https://python.langchain.com/v0.2/docs/tutorials/qa_chat_history/"}, "BaseChatMessageHistory": {"# Legacy": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/conversation_chain/", "Build an Agent with AgentExecutor (Legacy)": "https://python.langchain.com/v0.2/docs/how_to/agent_executor/", "How to add chat history": "https://python.langchain.com/v0.2/docs/how_to/qa_chat_history_how_to/", "Build a Chatbot": "https://python.langchain.com/v0.2/docs/tutorials/chatbot/", "Conversational RAG": "https://python.langchain.com/v0.2/docs/tutorials/qa_chat_history/"}, "ConstitutionalChain": {"# Legacy": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/constitutional_chain/"}, "ConstitutionalPrinciple": {"# Legacy": 
"https://python.langchain.com/v0.2/docs/versions/migrating_chains/constitutional_chain/"}, "OpenAI": {"# Legacy": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/constitutional_chain/", "# Example": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/map_rerank_docs_chain/", "How to use output parsers to parse an LLM response into structured format": "https://python.langchain.com/v0.2/docs/how_to/output_parser_structured/", "How to reorder retrieved results to mitigate the \"lost in the middle\" effect": "https://python.langchain.com/v0.2/docs/how_to/long_context_reorder/", "How to add fallbacks to a runnable": "https://python.langchain.com/v0.2/docs/how_to/fallbacks/", "How to do retrieval with contextual compression": "https://python.langchain.com/v0.2/docs/how_to/contextual_compression/", "How to retry when a parsing error occurs": "https://python.langchain.com/v0.2/docs/how_to/output_parser_retry/", "How to stream responses from an LLM": "https://python.langchain.com/v0.2/docs/how_to/streaming_llm/", "How to cache LLM responses": "https://python.langchain.com/v0.2/docs/how_to/llm_caching/", "How to track token usage for LLMs": "https://python.langchain.com/v0.2/docs/how_to/llm_token_usage_tracking/", "Model caches": "https://python.langchain.com/v0.2/docs/integrations/llm_caching/", "Milvus": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/milvus_self_query/", "PGVector (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/pgvector_self_query/", "Weaviate": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/weaviate/", "Databricks Vector Search": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/databricks_vector_search/", "DingoDB": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/dingo/", "OpenSearch": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/opensearch_self_query/", "Elasticsearch": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/elasticsearch_self_query/", "Chroma": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/chroma_self_query/", "Timescale Vector (Postgres) ": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/timescalevector_self_query/", "Astra DB (Cassandra)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/astradb/", "Pinecone": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/pinecone/", "Supabase (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/supabase_self_query/", "Redis": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/redis_self_query/", "Deep Lake": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/activeloop_deeplake_self_query/", "MongoDB Atlas": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/mongodb_atlas/", "Qdrant": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/qdrant_self_query/", "OpenAI": "https://python.langchain.com/v0.2/docs/integrations/llms/openai/", "Jira Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/jira/", "Google Jobs": "https://python.langchain.com/v0.2/docs/integrations/tools/google_jobs/", "Google Serper": "https://python.langchain.com/v0.2/docs/integrations/tools/google_serper/", "Azure Cognitive Services Toolkit": 
"https://python.langchain.com/v0.2/docs/integrations/tools/azure_cognitive_services/", "Human as a tool": "https://python.langchain.com/v0.2/docs/integrations/tools/human_tools/", "Natural Language API Toolkits": "https://python.langchain.com/v0.2/docs/integrations/tools/openapi_nla/", "Steam Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/steam/", "JSON Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/json/", "Google Finance": "https://python.langchain.com/v0.2/docs/integrations/tools/google_finance/", "ClickUp Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/clickup/", "AWS Lambda": "https://python.langchain.com/v0.2/docs/integrations/tools/awslambda/", "Google Drive": "https://python.langchain.com/v0.2/docs/integrations/tools/google_drive/", "OpenWeatherMap": "https://python.langchain.com/v0.2/docs/integrations/tools/openweathermap/", "Eleven Labs Text2Speech": "https://python.langchain.com/v0.2/docs/integrations/tools/eleven_labs_tts/", "Office365 Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/office365/", "Pandas Dataframe": "https://python.langchain.com/v0.2/docs/integrations/tools/pandas/", "Zapier Natural Language Actions": "https://python.langchain.com/v0.2/docs/integrations/tools/zapier/", "Lemon Agent": "https://python.langchain.com/v0.2/docs/integrations/tools/lemonai/", "NASA Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/nasa/", "GraphQL": "https://python.langchain.com/v0.2/docs/integrations/tools/graphql/", "SearchApi": "https://python.langchain.com/v0.2/docs/integrations/providers/searchapi/", "Gradio": "https://python.langchain.com/v0.2/docs/integrations/tools/gradio_tools/", "SceneXplain": "https://python.langchain.com/v0.2/docs/integrations/tools/sceneXplain/", "Azure AI Services Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/azure_ai_services/", "OpenAPI Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/openapi/", "Dall-E Image Generator": "https://python.langchain.com/v0.2/docs/integrations/tools/dalle_image_generator/", "Gitlab Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/gitlab/", "Ionic Shopping Tool": "https://python.langchain.com/v0.2/docs/integrations/tools/ionic_shopping/", "Zep Open Source Memory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory/", "Mot\u00f6rhead": "https://python.langchain.com/v0.2/docs/integrations/memory/motorhead_memory/", "Zep Cloud Memory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory_cloud/", "Confident": "https://python.langchain.com/v0.2/docs/integrations/callbacks/confident/", "LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/callbacks/llmonitor/", "Fiddler": "https://python.langchain.com/v0.2/docs/integrations/callbacks/fiddler/", "SageMaker Tracking": "https://python.langchain.com/v0.2/docs/integrations/callbacks/sagemaker_tracking/", "Label Studio": "https://python.langchain.com/v0.2/docs/integrations/callbacks/labelstudio/", "Comet Tracing": "https://python.langchain.com/v0.2/docs/integrations/callbacks/comet_tracing/", "Argilla": "https://python.langchain.com/v0.2/docs/integrations/callbacks/argilla/", "PromptLayer": "https://python.langchain.com/v0.2/docs/integrations/callbacks/promptlayer/", "Streamlit": "https://python.langchain.com/v0.2/docs/integrations/callbacks/streamlit/", "Trubrics": "https://python.langchain.com/v0.2/docs/integrations/callbacks/trubrics/", "Infino": 
"https://python.langchain.com/v0.2/docs/integrations/callbacks/infino/", "DSPy": "https://python.langchain.com/v0.2/docs/integrations/providers/dspy/", "Comet": "https://python.langchain.com/v0.2/docs/integrations/providers/comet_tracking/", "Aim": "https://python.langchain.com/v0.2/docs/integrations/providers/aim_tracking/", "Weights & Biases": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracking/", "Log10": "https://python.langchain.com/v0.2/docs/integrations/providers/log10/", "LangChain Decorators \u2728": "https://python.langchain.com/v0.2/docs/integrations/providers/langchain_decorators/", "Rebuff": "https://python.langchain.com/v0.2/docs/integrations/providers/rebuff/", "Serper - Google Search API": "https://python.langchain.com/v0.2/docs/integrations/providers/google_serper/", "Helicone": "https://python.langchain.com/v0.2/docs/integrations/providers/helicone/", "Shale Protocol": "https://python.langchain.com/v0.2/docs/integrations/providers/shaleprotocol/", "WhyLabs": "https://python.langchain.com/v0.2/docs/integrations/providers/whylabs_profiling/", "WandB Tracing": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracing/", "ClearML": "https://python.langchain.com/v0.2/docs/integrations/providers/clearml_tracking/", "Ray Serve": "https://python.langchain.com/v0.2/docs/integrations/providers/ray_serve/", "Identity-enabled RAG using PebbloRetrievalQA": "https://python.langchain.com/v0.2/docs/integrations/providers/pebblo/pebblo_retrieval_qa/", "Apache Doris": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/apache_doris/", "Jaguar Vector Database": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/jaguar/", "StarRocks": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/starrocks/", "Marqo": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/marqo/", "Timescale Vector (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/timescalevector/", "Amazon Document DB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/documentdb/", "VoyageAI Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/voyageai-reranker/", "Apify Dataset": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/apify_dataset/", "Psychic": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/psychic/", "Docugami": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/docugami/", "Amazon Textract ": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/amazon_textract/", "NetworkX": "https://python.langchain.com/v0.2/docs/integrations/graphs/networkx/", "OpaquePrompts": "https://python.langchain.com/v0.2/docs/integrations/llms/opaqueprompts/", "Layerup Security": "https://python.langchain.com/v0.2/docs/integrations/llms/layerup_security/", "Generate Synthetic Data": "https://python.langchain.com/v0.2/docs/tutorials/data_generation/"}, "CRITIQUE_PROMPT": {"# Legacy": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/constitutional_chain/"}, "REVISION_PROMPT": {"# Legacy": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/constitutional_chain/"}, "WebBaseLoader": {"Load docs": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/retrieval_qa/", "# Basic example (short documents)": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/map_reduce_chain/", "Build an Agent with AgentExecutor (Legacy)": 
"https://python.langchain.com/v0.2/docs/how_to/agent_executor/", "How to stream results from your RAG application": "https://python.langchain.com/v0.2/docs/how_to/qa_streaming/", "How to get your RAG application to return sources": "https://python.langchain.com/v0.2/docs/how_to/qa_sources/", "How to summarize text through parallelization": "https://python.langchain.com/v0.2/docs/how_to/summarize_map_reduce/", "How to add chat history": "https://python.langchain.com/v0.2/docs/how_to/qa_chat_history_how_to/", "How to add retrieval to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_retrieval/", "How to use the MultiQueryRetriever": "https://python.langchain.com/v0.2/docs/how_to/MultiQueryRetriever/", "RePhraseQuery": "https://python.langchain.com/v0.2/docs/integrations/retrievers/re_phrase/", "Infino": "https://python.langchain.com/v0.2/docs/integrations/callbacks/infino/", "ApertureDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/aperturedb/", "Zep Cloud": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/zep_cloud/", "Zep": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/zep/", "Google Cloud Vertex AI Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/google_cloud_vertexai_rerank/", "Merge Documents Loader": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/merge_doc/", "WebBaseLoader": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/web_base/", "Build a Retrieval Augmented Generation (RAG) App": "https://python.langchain.com/v0.2/docs/tutorials/rag/", "Build a Local RAG Application": "https://python.langchain.com/v0.2/docs/tutorials/local_rag/", "Summarize Text": "https://python.langchain.com/v0.2/docs/tutorials/summarization/", "Conversational RAG": "https://python.langchain.com/v0.2/docs/tutorials/qa_chat_history/"}, "FAISS": {"Load docs": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/retrieval_qa/", "Build an Agent with AgentExecutor (Legacy)": "https://python.langchain.com/v0.2/docs/how_to/agent_executor/", "How to handle long text when doing extraction": "https://python.langchain.com/v0.2/docs/how_to/extraction_long_text/", "How to load PDFs": "https://python.langchain.com/v0.2/docs/how_to/document_loader_pdf/", "How to better prompt when doing SQL question-answering": "https://python.langchain.com/v0.2/docs/how_to/sql_prompting/", "How to add values to a chain's state": "https://python.langchain.com/v0.2/docs/how_to/assign/", "How to inspect runnables": "https://python.langchain.com/v0.2/docs/how_to/inspect/", "How to deal with large databases when doing SQL question-answering": "https://python.langchain.com/v0.2/docs/how_to/sql_large_db/", "How to stream runnables": "https://python.langchain.com/v0.2/docs/how_to/streaming/", "How to invoke runnables in parallel": "https://python.langchain.com/v0.2/docs/how_to/parallel/", "How to pass through arguments from one step to the next": "https://python.langchain.com/v0.2/docs/how_to/passthrough/", "How to do retrieval with contextual compression": "https://python.langchain.com/v0.2/docs/how_to/contextual_compression/", "How to use a time-weighted vector store retriever": "https://python.langchain.com/v0.2/docs/how_to/time_weighted_vectorstore/", "How to create and query vector stores": "https://python.langchain.com/v0.2/docs/how_to/vectorstores/", "How to use a vectorstore as a retriever": "https://python.langchain.com/v0.2/docs/how_to/vectorstore_retriever/", "Caching": 
"https://python.langchain.com/v0.2/docs/how_to/caching_embeddings/", "How to combine results from multiple retrievers": "https://python.langchain.com/v0.2/docs/how_to/ensemble_retriever/", "How to select examples by maximal marginal relevance (MMR)": "https://python.langchain.com/v0.2/docs/how_to/example_selectors_mmr/", "NVIDIA NIMs ": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/nvidia_ai_endpoints/", "Fleet AI Context": "https://python.langchain.com/v0.2/docs/integrations/retrievers/fleet_context/", "LLMLingua Document Compressor": "https://python.langchain.com/v0.2/docs/integrations/retrievers/llmlingua/", "Cohere reranker": "https://python.langchain.com/v0.2/docs/integrations/retrievers/cohere-reranker/", "FlashRank reranker": "https://python.langchain.com/v0.2/docs/integrations/retrievers/flashrank-reranker/", "UpTrain": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "RAGatouille": "https://python.langchain.com/v0.2/docs/integrations/providers/ragatouille/", "Facebook - Meta": "https://python.langchain.com/v0.2/docs/integrations/providers/facebook/", "Faiss (Async)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/faiss_async/", "Faiss": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/faiss/", "VoyageAI Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/voyageai-reranker/", "RankLLM Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/rankllm-reranker/", "Volcengine Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/volcengine_rerank/", "OpenVINO Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/openvino_rerank/", "Cross Encoder Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/cross_encoder_reranker/", "Jina Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/jina_rerank/", "DashScope Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/dashscope_rerank/", "YouTube audio": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/youtube_audio/", "Build a Question/Answering system over SQL data": "https://python.langchain.com/v0.2/docs/tutorials/sql_qa/"}, "OpenAIEmbeddings": {"Load docs": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/retrieval_qa/", "Build an Agent with AgentExecutor (Legacy)": "https://python.langchain.com/v0.2/docs/how_to/agent_executor/", "How to handle long text when doing extraction": "https://python.langchain.com/v0.2/docs/how_to/extraction_long_text/", "How to load PDFs": "https://python.langchain.com/v0.2/docs/how_to/document_loader_pdf/", "How to better prompt when doing SQL question-answering": "https://python.langchain.com/v0.2/docs/how_to/sql_prompting/", "How to add values to a chain's state": "https://python.langchain.com/v0.2/docs/how_to/assign/", "How to route between sub-chains": "https://python.langchain.com/v0.2/docs/how_to/routing/", "How to do per-user retrieval": "https://python.langchain.com/v0.2/docs/how_to/qa_per_user/", "How to use few shot examples": "https://python.langchain.com/v0.2/docs/how_to/few_shot_examples/", "How to inspect runnables": "https://python.langchain.com/v0.2/docs/how_to/inspect/", "How to handle cases where no queries are generated": "https://python.langchain.com/v0.2/docs/how_to/query_no_queries/", "How to use few shot examples in chat models": 
"https://python.langchain.com/v0.2/docs/how_to/few_shot_examples_chat/", "How to select examples by similarity": "https://python.langchain.com/v0.2/docs/how_to/example_selectors_similarity/", "Text embedding models": "https://python.langchain.com/v0.2/docs/how_to/embed_text/", "How to deal with large databases when doing SQL question-answering": "https://python.langchain.com/v0.2/docs/how_to/sql_large_db/", "How to handle multiple queries when doing query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_multiple_queries/", "How to stream results from your RAG application": "https://python.langchain.com/v0.2/docs/how_to/qa_streaming/", "How to get your RAG application to return sources": "https://python.langchain.com/v0.2/docs/how_to/qa_sources/", "How to use the LangChain indexing API": "https://python.langchain.com/v0.2/docs/how_to/indexing/", "How to split text based on semantic similarity": "https://python.langchain.com/v0.2/docs/how_to/semantic-chunker/", "How to convert Runnables as Tools": "https://python.langchain.com/v0.2/docs/how_to/convert_runnable_to_tool/", "How to stream runnables": "https://python.langchain.com/v0.2/docs/how_to/streaming/", "How to invoke runnables in parallel": "https://python.langchain.com/v0.2/docs/how_to/parallel/", "How to pass through arguments from one step to the next": "https://python.langchain.com/v0.2/docs/how_to/passthrough/", "How to retrieve using multiple vectors per document": "https://python.langchain.com/v0.2/docs/how_to/multi_vector/", "How to add chat history": "https://python.langchain.com/v0.2/docs/how_to/qa_chat_history_how_to/", "How to add retrieval to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_retrieval/", "How to do retrieval with contextual compression": "https://python.langchain.com/v0.2/docs/how_to/contextual_compression/", "How to handle multiple retrievers when doing query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_multiple_retrievers/", "How to use a time-weighted vector store retriever": "https://python.langchain.com/v0.2/docs/how_to/time_weighted_vectorstore/", "How to create and query vector stores": "https://python.langchain.com/v0.2/docs/how_to/vectorstores/", "How to get a RAG application to add citations": "https://python.langchain.com/v0.2/docs/how_to/qa_citations/", "How to use the MultiQueryRetriever": "https://python.langchain.com/v0.2/docs/how_to/MultiQueryRetriever/", "How to best prompt for Graph-RAG": "https://python.langchain.com/v0.2/docs/how_to/graph_prompting/", "How to use the Parent Document Retriever": "https://python.langchain.com/v0.2/docs/how_to/parent_document_retriever/", "How deal with high cardinality categoricals when doing query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_high_cardinality/", "How to use a vectorstore as a retriever": "https://python.langchain.com/v0.2/docs/how_to/vectorstore_retriever/", "Caching": "https://python.langchain.com/v0.2/docs/how_to/caching_embeddings/", "How to combine results from multiple retrievers": "https://python.langchain.com/v0.2/docs/how_to/ensemble_retriever/", "How to select examples by maximal marginal relevance (MMR)": "https://python.langchain.com/v0.2/docs/how_to/example_selectors_mmr/", "How to do \"self-querying\" retrieval": "https://python.langchain.com/v0.2/docs/how_to/self_query/", "Hybrid Search": "https://python.langchain.com/v0.2/docs/how_to/hybrid/", "How to add scores to retriever results": "https://python.langchain.com/v0.2/docs/how_to/add_scores_retriever/", 
"Model caches": "https://python.langchain.com/v0.2/docs/integrations/llm_caching/", "OpenAIEmbeddings": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/openai/", "AzureAISearchRetriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/azure_ai_search/", "RePhraseQuery": "https://python.langchain.com/v0.2/docs/integrations/retrievers/re_phrase/", "Kinetica Vectorstore based Retriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/kinetica/", "JaguarDB Vector Database": "https://python.langchain.com/v0.2/docs/integrations/retrievers/jaguar/", "Fleet AI Context": "https://python.langchain.com/v0.2/docs/integrations/retrievers/fleet_context/", "LLMLingua Document Compressor": "https://python.langchain.com/v0.2/docs/integrations/retrievers/llmlingua/", "SingleStoreDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/singlestoredb/", "kNN": "https://python.langchain.com/v0.2/docs/integrations/retrievers/knn/", "DocArray": "https://python.langchain.com/v0.2/docs/integrations/retrievers/docarray_retriever/", "SVM": "https://python.langchain.com/v0.2/docs/integrations/retrievers/svm/", "Pinecone Hybrid Search": "https://python.langchain.com/v0.2/docs/integrations/retrievers/pinecone_hybrid_search/", "Activeloop Deep Memory": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "Milvus Hybrid Search Retriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/milvus_hybrid_search/", "FlashRank reranker": "https://python.langchain.com/v0.2/docs/integrations/retrievers/flashrank-reranker/", "LOTR (Merger Retriever)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/merger_retriever/", "Milvus": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/milvus_self_query/", "PGVector (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/pgvector_self_query/", "Weaviate": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/weaviate/", "SAP HANA Cloud Vector Engine": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sap_hanavector/", "Databricks Vector Search": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/databricks_vector_search/", "DingoDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/dingo/", "OpenSearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/opensearch/", "Elasticsearch": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/elasticsearch_self_query/", "Chroma": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/chroma_self_query/", "Timescale Vector (Postgres) ": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/timescalevector_self_query/", "Astra DB (Cassandra)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/astradb/", "Pinecone": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/pinecone/", "Supabase (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/supabase/", "Redis": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/redis_self_query/", "MyScale": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/myscale/", "Deep Lake": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/activeloop_deeplake_self_query/", "MongoDB Atlas": 
"https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/mongodb_atlas/", "Qdrant": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/qdrant_self_query/", "OpenAI": "https://python.langchain.com/v0.2/docs/integrations/platforms/openai/", "Xata": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/xata/", "Confident": "https://python.langchain.com/v0.2/docs/integrations/callbacks/confident/", "UpTrain": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "RAGatouille": "https://python.langchain.com/v0.2/docs/integrations/providers/ragatouille/", "Upstash Vector": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/upstash/", "Javelin AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/javelin_ai_gateway/", "Identity-enabled RAG using PebbloRetrievalQA": "https://python.langchain.com/v0.2/docs/integrations/providers/pebblo/pebblo_retrieval_qa/", "LanceDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/lancedb/", "Apache Doris": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/apache_doris/", "Kinetica Vectorstore API": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/kinetica/", "Yellowbrick": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/yellowbrick/", "Jaguar Vector Database": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/jaguar/", "Hippo": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/hippo/", "Rockset": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/rockset/", "Zilliz": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/zilliz/", "Azure Cosmos DB Mongo vCore": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azure_cosmos_db/", "viking DB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vikingdb/", "Typesense": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/typesense/", "Momento Vector Index (MVI)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/momento_vector_index/", "TiDB Vector": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tidb_vector/", "Activeloop Deep Lake": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/activeloop_deeplake/", "Neo4j Vector Index": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/neo4jvector/", "Lantern": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/lantern/", "DuckDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/duckdb/", "Alibaba Cloud OpenSearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/alibabacloud_opensearch/", "StarRocks": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/starrocks/", "scikit-learn": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sklearn/", "Tencent Cloud VectorDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tencentvectordb/", "DocArray HnswSearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/docarray_hnsw/", "Tigris": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tigris/", "China Mobile ECloud ElasticSearch VectorSearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/ecloud_vector_search/", "Faiss (Async)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/faiss_async/", "Azure AI Search": 
"https://python.langchain.com/v0.2/docs/integrations/vectorstores/azuresearch/", "Apache Cassandra": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/cassandra/", "USearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/usearch/", "KDB.AI": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/kdbai/", "DocArray InMemorySearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/docarray_in_memory/", "Postgres Embedding": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pgembedding/", "Timescale Vector (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/timescalevector/", "Epsilla": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/epsilla/", "Amazon Document DB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/documentdb/", "AnalyticDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/analyticdb/", "Hologres": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/hologres/", "Meilisearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/meilisearch/", "RankLLM Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/rankllm-reranker/", "YouTube audio": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/youtube_audio/", "Image captions": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/image_captions/", "Apify Dataset": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/apify_dataset/", "Psychic": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/psychic/", "Docugami": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/docugami/", "Build a Retrieval Augmented Generation (RAG) App": "https://python.langchain.com/v0.2/docs/tutorials/rag/", "Conversational RAG": "https://python.langchain.com/v0.2/docs/tutorials/qa_chat_history/", "Build a Query Analysis System": "https://python.langchain.com/v0.2/docs/tutorials/query_analysis/", "Build a Question/Answering system over SQL data": "https://python.langchain.com/v0.2/docs/tutorials/sql_qa/", "Build a PDF ingestion and Question/Answering system": "https://python.langchain.com/v0.2/docs/tutorials/pdf_qa/", "Vector stores and retrievers": "https://python.langchain.com/v0.2/docs/tutorials/retrievers/"}, "RecursiveCharacterTextSplitter": {"Load docs": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/retrieval_qa/", "Build an Agent with AgentExecutor (Legacy)": "https://python.langchain.com/v0.2/docs/how_to/agent_executor/", "How to split code": "https://python.langchain.com/v0.2/docs/how_to/code_splitter/", "How to handle cases where no queries are generated": "https://python.langchain.com/v0.2/docs/how_to/query_no_queries/", "How to recursively split text by characters": "https://python.langchain.com/v0.2/docs/how_to/recursive_text_splitter/", "How to handle multiple queries when doing query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_multiple_queries/", "How to stream results from your RAG application": "https://python.langchain.com/v0.2/docs/how_to/qa_streaming/", "How to get your RAG application to return sources": "https://python.langchain.com/v0.2/docs/how_to/qa_sources/", "How to retrieve using multiple vectors per document": "https://python.langchain.com/v0.2/docs/how_to/multi_vector/", "How to add chat history": "https://python.langchain.com/v0.2/docs/how_to/qa_chat_history_how_to/", "How to add 
retrieval to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_retrieval/", "How to handle multiple retrievers when doing query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_multiple_retrievers/", "How to split text by tokens ": "https://python.langchain.com/v0.2/docs/how_to/split_by_token/", "How to get a RAG application to add citations": "https://python.langchain.com/v0.2/docs/how_to/qa_citations/", "How to use the MultiQueryRetriever": "https://python.langchain.com/v0.2/docs/how_to/MultiQueryRetriever/", "How to use the Parent Document Retriever": "https://python.langchain.com/v0.2/docs/how_to/parent_document_retriever/", "How to split Markdown by Headers": "https://python.langchain.com/v0.2/docs/how_to/markdown_header_metadata_splitter/", "How to split by HTML header ": "https://python.langchain.com/v0.2/docs/how_to/HTML_header_metadata_splitter/", "How to split by HTML sections": "https://python.langchain.com/v0.2/docs/how_to/HTML_section_aware_splitter/", "How to add scores to retriever results": "https://python.langchain.com/v0.2/docs/how_to/add_scores_retriever/", "RePhraseQuery": "https://python.langchain.com/v0.2/docs/integrations/retrievers/re_phrase/", "LLMLingua Document Compressor": "https://python.langchain.com/v0.2/docs/integrations/retrievers/llmlingua/", "Cohere reranker": "https://python.langchain.com/v0.2/docs/integrations/retrievers/cohere-reranker/", "Activeloop Deep Memory": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "FlashRank reranker": "https://python.langchain.com/v0.2/docs/integrations/retrievers/flashrank-reranker/", "Maritalk": "https://python.langchain.com/v0.2/docs/integrations/chat/maritalk/", "UpTrain": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "RAGatouille": "https://python.langchain.com/v0.2/docs/integrations/providers/ragatouille/", "Yellowbrick": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/yellowbrick/", "Google Vertex AI Vector Search": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_vertex_ai_vector_search/", "viking DB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vikingdb/", "ApertureDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/aperturedb/", "Azure Cosmos DB No SQL": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azure_cosmos_db_no_sql/", "Zep Cloud": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/zep_cloud/", "Apache Cassandra": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/cassandra/", "Zep": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/zep/", "Vearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vearch/", "VoyageAI Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/voyageai-reranker/", "RankLLM Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/rankllm-reranker/", "Volcengine Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/volcengine_rerank/", "OpenVINO Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/openvino_rerank/", "Google Cloud Vertex AI Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/google_cloud_vertexai_rerank/", "Cross Encoder Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/cross_encoder_reranker/", "Jina Reranker": 
"https://python.langchain.com/v0.2/docs/integrations/document_transformers/jina_rerank/", "DashScope Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/dashscope_rerank/", "YouTube audio": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/youtube_audio/", "Image captions": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/image_captions/", "Source Code": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/source_code/", "Build a Retrieval Augmented Generation (RAG) App": "https://python.langchain.com/v0.2/docs/tutorials/rag/", "Build a Local RAG Application": "https://python.langchain.com/v0.2/docs/tutorials/local_rag/", "Conversational RAG": "https://python.langchain.com/v0.2/docs/tutorials/qa_chat_history/", "Build a Query Analysis System": "https://python.langchain.com/v0.2/docs/tutorials/query_analysis/", "Build a PDF ingestion and Question/Answering system": "https://python.langchain.com/v0.2/docs/tutorials/pdf_qa/"}, "ConversationalRetrievalChain": {"Load docs": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/conversation_retrieval_chain/", "Outline": "https://python.langchain.com/v0.2/docs/integrations/retrievers/outline/", "SEC filing": "https://python.langchain.com/v0.2/docs/integrations/retrievers/sec_filings/", "Rememberizer": "https://python.langchain.com/v0.2/docs/integrations/retrievers/rememberizer/", "Kay.ai": "https://python.langchain.com/v0.2/docs/integrations/retrievers/kay/", "SAP HANA Cloud Vector Engine": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sap_hanavector/"}, "create_history_aware_retriever": {"Load docs": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/conversation_retrieval_chain/", "How to stream results from your RAG application": "https://python.langchain.com/v0.2/docs/how_to/qa_streaming/", "How to add chat history": "https://python.langchain.com/v0.2/docs/how_to/qa_chat_history_how_to/", "Conversational RAG": "https://python.langchain.com/v0.2/docs/tutorials/qa_chat_history/"}, "create_retrieval_chain": {"Load docs": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/retrieval_qa/", "How to stream results from your RAG application": "https://python.langchain.com/v0.2/docs/how_to/qa_streaming/", "How to get your RAG application to return sources": "https://python.langchain.com/v0.2/docs/how_to/qa_sources/", "How to add chat history": "https://python.langchain.com/v0.2/docs/how_to/qa_chat_history_how_to/", "RAGatouille": "https://python.langchain.com/v0.2/docs/integrations/retrievers/ragatouille/", "ApertureDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/aperturedb/", "Jina Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/jina_rerank/", "Image captions": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/image_captions/", "Build a Retrieval Augmented Generation (RAG) App": "https://python.langchain.com/v0.2/docs/tutorials/rag/", "Conversational RAG": "https://python.langchain.com/v0.2/docs/tutorials/qa_chat_history/", "Build a PDF ingestion and Question/Answering system": "https://python.langchain.com/v0.2/docs/tutorials/pdf_qa/"}, "MapReduceDocumentsChain": {"# Basic example (short documents)": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/map_reduce_chain/"}, "ReduceDocumentsChain": {"# Basic example (short documents)": 
"https://python.langchain.com/v0.2/docs/versions/migrating_chains/map_reduce_chain/"}, "CharacterTextSplitter": {"# Basic example (short documents)": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/map_reduce_chain/", "How to handle long text when doing extraction": "https://python.langchain.com/v0.2/docs/how_to/extraction_long_text/", "How to split by character": "https://python.langchain.com/v0.2/docs/how_to/character_text_splitter/", "How to summarize text through parallelization": "https://python.langchain.com/v0.2/docs/how_to/summarize_map_reduce/", "How to use the LangChain indexing API": "https://python.langchain.com/v0.2/docs/how_to/indexing/", "How to do retrieval with contextual compression": "https://python.langchain.com/v0.2/docs/how_to/contextual_compression/", "How to create and query vector stores": "https://python.langchain.com/v0.2/docs/how_to/vectorstores/", "How to split text by tokens ": "https://python.langchain.com/v0.2/docs/how_to/split_by_token/", "How to use a vectorstore as a retriever": "https://python.langchain.com/v0.2/docs/how_to/vectorstore_retriever/", "Caching": "https://python.langchain.com/v0.2/docs/how_to/caching_embeddings/", "Model caches": "https://python.langchain.com/v0.2/docs/integrations/llm_caching/", "AzureAISearchRetriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/azure_ai_search/", "Kinetica Vectorstore based Retriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/kinetica/", "JaguarDB Vector Database": "https://python.langchain.com/v0.2/docs/integrations/retrievers/jaguar/", "SingleStoreDB": "https://python.langchain.com/v0.2/docs/integrations/retrievers/singlestoredb/", "OpenAI": "https://python.langchain.com/v0.2/docs/integrations/platforms/openai/", "Confident": "https://python.langchain.com/v0.2/docs/integrations/callbacks/confident/", "Upstash Vector": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/upstash/", "VDMS": "https://python.langchain.com/v0.2/docs/integrations/providers/vdms/", "LanceDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/lancedb/", "Kinetica Vectorstore API": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/kinetica/", "SQLite-VSS": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sqlitevss/", "Vald": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vald/", "Weaviate": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/weaviate/", "Jaguar Vector Database": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/jaguar/", "SAP HANA Cloud Vector Engine": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sap_hanavector/", "DashVector": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/dashvector/", "Databricks Vector Search": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/databricks_vector_search/", "ScaNN": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/scann/", "Xata": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/xata/", "Hippo": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/hippo/", "Vespa": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vespa/", "Rockset": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/rockset/", "DingoDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/dingo/", "Zilliz": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/zilliz/", "Azure Cosmos 
DB Mongo vCore": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azure_cosmos_db/", "Annoy": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/annoy/", "Couchbase ": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/couchbase/", "Typesense": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/typesense/", "Momento Vector Index (MVI)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/momento_vector_index/", "TiDB Vector": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tidb_vector/", "Relyt": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/relyt/", "Activeloop Deep Lake": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/activeloop_deeplake/", "vlite": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vlite/", "Neo4j Vector Index": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/neo4jvector/", "Lantern": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/lantern/", "Tair": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tair/", "DuckDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/duckdb/", "Alibaba Cloud OpenSearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/alibabacloud_opensearch/", "Clarifai": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/clarifai/", "scikit-learn": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sklearn/", "Tencent Cloud VectorDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tencentvectordb/", "DocArray HnswSearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/docarray_hnsw/", "MyScale": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/myscale/", "TileDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tiledb/", "Google Memorystore for Redis": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_memorystore_redis/", "Tigris": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tigris/", "China Mobile ECloud ElasticSearch VectorSearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/ecloud_vector_search/", "Bagel": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/bagel/", "Baidu Cloud ElasticSearch VectorSearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/baiducloud_vector_search/", "AwaDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/awadb/", "Supabase (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/supabase/", "SurrealDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/surrealdb/", "OpenSearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/opensearch/", "Faiss (Async)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/faiss_async/", "BagelDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/bageldb/", "ManticoreSearch VectorStore": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/manticore_search/", "Azure AI Search": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azuresearch/", "USearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/usearch/", "PGVecto.rs": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pgvecto_rs/", "Marqo": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/marqo/", "DocArray 
InMemorySearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/docarray_in_memory/", "Postgres Embedding": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pgembedding/", "Intel's Visual Data Management System (VDMS)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vdms/", "Timescale Vector (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/timescalevector/", "Epsilla": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/epsilla/", "Amazon Document DB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/documentdb/", "SemaDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/semadb/", "AnalyticDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/analyticdb/", "Hologres": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/hologres/", "Baidu VectorDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/baiduvectordb/", "Meilisearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/meilisearch/", "Psychic": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/psychic/", "Manifest": "https://python.langchain.com/v0.2/docs/integrations/llms/manifest/", "Summarize Text": "https://python.langchain.com/v0.2/docs/tutorials/summarization/"}, "acollapse_docs": {"# Basic example (short documents)": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/map_reduce_chain/", "How to summarize text through parallelization": "https://python.langchain.com/v0.2/docs/how_to/summarize_map_reduce/", "Summarize Text": "https://python.langchain.com/v0.2/docs/tutorials/summarization/"}, "split_list_of_docs": {"# Basic example (short documents)": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/map_reduce_chain/", "How to summarize text through parallelization": "https://python.langchain.com/v0.2/docs/how_to/summarize_map_reduce/", "Summarize Text": "https://python.langchain.com/v0.2/docs/tutorials/summarization/"}, "RefineDocumentsChain": {"# Example": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/refine_docs_chain/"}, "RetrievalQA": {"Load docs": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/retrieval_qa/", "LLMLingua Document Compressor": "https://python.langchain.com/v0.2/docs/integrations/retrievers/llmlingua/", "Bedrock (Knowledge Bases) Retriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/bedrock/", "Cohere reranker": "https://python.langchain.com/v0.2/docs/integrations/retrievers/cohere-reranker/", "Activeloop Deep Memory": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "FlashRank reranker": "https://python.langchain.com/v0.2/docs/integrations/retrievers/flashrank-reranker/", "Confident": "https://python.langchain.com/v0.2/docs/integrations/callbacks/confident/", "UpTrain": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "Apache Doris": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/apache_doris/", "ScaNN": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/scann/", "Google Vertex AI Vector Search": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_vertex_ai_vector_search/", "Momento Vector Index (MVI)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/momento_vector_index/", "Activeloop Deep Lake": 
"https://python.langchain.com/v0.2/docs/integrations/vectorstores/activeloop_deeplake/", "StarRocks": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/starrocks/", "KDB.AI": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/kdbai/", "Timescale Vector (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/timescalevector/", "Amazon Document DB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/documentdb/", "VoyageAI Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/voyageai-reranker/", "RankLLM Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/rankllm-reranker/", "YouTube audio": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/youtube_audio/", "Docugami": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/docugami/"}, "RunnablePassthrough": {"Load docs": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/retrieval_qa/", "# Legacy": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/llm_router_chain/", "How to add values to a chain's state": "https://python.langchain.com/v0.2/docs/how_to/assign/", "How to route between sub-chains": "https://python.langchain.com/v0.2/docs/how_to/routing/", "How to do per-user retrieval": "https://python.langchain.com/v0.2/docs/how_to/qa_per_user/", "How to inspect runnables": "https://python.langchain.com/v0.2/docs/how_to/inspect/", "How to handle cases where no queries are generated": "https://python.langchain.com/v0.2/docs/how_to/query_no_queries/", "How to do tool/function calling": "https://python.langchain.com/v0.2/docs/how_to/function_calling/", "How to add a human-in-the-loop for tools": "https://python.langchain.com/v0.2/docs/how_to/tools_human/", "How to deal with large databases when doing SQL question-answering": "https://python.langchain.com/v0.2/docs/how_to/sql_large_db/", "How to handle multiple queries when doing query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_multiple_queries/", "How to map values to a graph database": "https://python.langchain.com/v0.2/docs/how_to/graph_mapping/", "How to do question answering over CSVs": "https://python.langchain.com/v0.2/docs/how_to/sql_csv/", "How to get your RAG application to return sources": "https://python.langchain.com/v0.2/docs/how_to/qa_sources/", "How to add default invocation args to a Runnable": "https://python.langchain.com/v0.2/docs/how_to/binding/", "How to convert Runnables as Tools": "https://python.langchain.com/v0.2/docs/how_to/convert_runnable_to_tool/", "How to create a dynamic (self-constructing) chain": "https://python.langchain.com/v0.2/docs/how_to/dynamic_chain/", "How to stream runnables": "https://python.langchain.com/v0.2/docs/how_to/streaming/", "How to invoke runnables in parallel": "https://python.langchain.com/v0.2/docs/how_to/parallel/", "How to pass through arguments from one step to the next": "https://python.langchain.com/v0.2/docs/how_to/passthrough/", "How to add chat history": "https://python.langchain.com/v0.2/docs/how_to/qa_chat_history_how_to/", "How to add retrieval to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_retrieval/", "How to handle multiple retrievers when doing query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_multiple_retrievers/", "How to get a RAG application to add citations": "https://python.langchain.com/v0.2/docs/how_to/qa_citations/", "How to add memory to chatbots": 
"https://python.langchain.com/v0.2/docs/how_to/chatbots_memory/", "How deal with high cardinality categoricals when doing query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_high_cardinality/", "How to add ad-hoc tool calling capability to LLMs and Chat Models": "https://python.langchain.com/v0.2/docs/how_to/tools_prompting/", "LangChain Expression Language Cheatsheet": "https://python.langchain.com/v0.2/docs/how_to/lcel_cheatsheet/", "Hybrid Search": "https://python.langchain.com/v0.2/docs/how_to/hybrid/", "How to use few-shot prompting with tool calling": "https://python.langchain.com/v0.2/docs/how_to/tools_few_shot/", "How to add examples to the prompt for query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_few_shot/", "NVIDIA NIMs ": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/nvidia_ai_endpoints/", "AzureAISearchRetriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/azure_ai_search/", "You.com": "https://python.langchain.com/v0.2/docs/integrations/retrievers/you-retriever/", "Fleet AI Context": "https://python.langchain.com/v0.2/docs/integrations/retrievers/fleet_context/", "AskNews": "https://python.langchain.com/v0.2/docs/integrations/retrievers/asknews/", "WikipediaRetriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/wikipedia/", "TavilySearchAPIRetriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/tavily/", "ArxivRetriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/arxiv/", "ElasticsearchRetriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/elasticsearch_retriever/", "Milvus Hybrid Search Retriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/milvus_hybrid_search/", "Google Vertex AI Search": "https://python.langchain.com/v0.2/docs/integrations/retrievers/google_vertex_ai_search/", "UpTrain": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "DSPy": "https://python.langchain.com/v0.2/docs/integrations/providers/dspy/", "Weaviate": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/weaviate/", "Jaguar Vector Database": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/jaguar/", "Apache Cassandra": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/cassandra/", "Google Cloud Vertex AI Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/google_cloud_vertexai_rerank/", "OpaquePrompts": "https://python.langchain.com/v0.2/docs/integrations/llms/opaqueprompts/", "Build a Retrieval Augmented Generation (RAG) App": "https://python.langchain.com/v0.2/docs/tutorials/rag/", "Build a Local RAG Application": "https://python.langchain.com/v0.2/docs/tutorials/local_rag/", "Build a Chatbot": "https://python.langchain.com/v0.2/docs/tutorials/chatbot/", "Build a Query Analysis System": "https://python.langchain.com/v0.2/docs/tutorials/query_analysis/", "Build a Question/Answering system over SQL data": "https://python.langchain.com/v0.2/docs/tutorials/sql_qa/", "Vector stores and retrievers": "https://python.langchain.com/v0.2/docs/tutorials/retrievers/"}, "LLMRouterChain": {"# Legacy": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/llm_router_chain/"}, "RouterOutputParser": {"# Legacy": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/llm_router_chain/"}, "MapRerankDocumentsChain": {"# Example": 
"https://python.langchain.com/v0.2/docs/versions/migrating_chains/map_rerank_docs_chain/"}, "RegexParser": {"# Example": "https://python.langchain.com/v0.2/docs/versions/migrating_chains/map_rerank_docs_chain/"}, "TavilySearchResults": {"Build an Agent with AgentExecutor (Legacy)": "https://python.langchain.com/v0.2/docs/how_to/agent_executor/", "How to add tools to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_tools/", "How to debug your LLM apps": "https://python.langchain.com/v0.2/docs/how_to/debugging/", "Tavily Search": "https://python.langchain.com/v0.2/docs/integrations/tools/tavily_search/", "ZHIPU AI": "https://python.langchain.com/v0.2/docs/integrations/chat/zhipuai/", "Cohere": "https://python.langchain.com/v0.2/docs/integrations/providers/cohere/", "Build an Agent": "https://python.langchain.com/v0.2/docs/tutorials/agents/"}, "create_retriever_tool": {"Build an Agent with AgentExecutor (Legacy)": "https://python.langchain.com/v0.2/docs/how_to/agent_executor/", "How to add chat history": "https://python.langchain.com/v0.2/docs/how_to/qa_chat_history_how_to/", "Xata": "https://python.langchain.com/v0.2/docs/integrations/memory/xata_chat_message_history/", "Conversational RAG": "https://python.langchain.com/v0.2/docs/tutorials/qa_chat_history/", "Build a Question/Answering system over SQL data": "https://python.langchain.com/v0.2/docs/tutorials/sql_qa/"}, "create_tool_calling_agent": {"Build an Agent with AgentExecutor (Legacy)": "https://python.langchain.com/v0.2/docs/how_to/agent_executor/", "How to use tools in a chain": "https://python.langchain.com/v0.2/docs/how_to/tools_chain/", "How to track token usage in ChatModels": "https://python.langchain.com/v0.2/docs/how_to/chat_token_usage_tracking/", "How to add tools to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_tools/", "How to debug your LLM apps": "https://python.langchain.com/v0.2/docs/how_to/debugging/", "How to migrate from legacy LangChain agents to LangGraph": "https://python.langchain.com/v0.2/docs/how_to/migrate_agent/", "Azure Container Apps dynamic sessions": "https://python.langchain.com/v0.2/docs/integrations/tools/azure_dynamic_sessions/", "FinancialDatasets Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/financial_datasets/", "Databricks Unity Catalog (UC)": "https://python.langchain.com/v0.2/docs/integrations/tools/databricks/", "Riza Code Interpreter": "https://python.langchain.com/v0.2/docs/integrations/tools/riza/", "Bing Search": "https://python.langchain.com/v0.2/docs/integrations/tools/bing_search/"}, "AgentExecutor": {"Build an Agent with AgentExecutor (Legacy)": "https://python.langchain.com/v0.2/docs/how_to/agent_executor/", "How to add a semantic layer over graph database": "https://python.langchain.com/v0.2/docs/how_to/graph_semantic/", "How to use tools in a chain": "https://python.langchain.com/v0.2/docs/how_to/tools_chain/", "How to track token usage in ChatModels": "https://python.langchain.com/v0.2/docs/how_to/chat_token_usage_tracking/", "How to add tools to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_tools/", "How to debug your LLM apps": "https://python.langchain.com/v0.2/docs/how_to/debugging/", "How to migrate from legacy LangChain agents to LangGraph": "https://python.langchain.com/v0.2/docs/how_to/migrate_agent/", "Infobip": "https://python.langchain.com/v0.2/docs/integrations/tools/infobip/", "AskNews": "https://python.langchain.com/v0.2/docs/integrations/tools/asknews/", "Azure Container Apps dynamic 
sessions": "https://python.langchain.com/v0.2/docs/integrations/tools/azure_dynamic_sessions/", "FinancialDatasets Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/financial_datasets/", "Cassandra Database Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/cassandra_database/", "Polygon IO Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/polygon_toolkit/", "Semantic Scholar API Tool": "https://python.langchain.com/v0.2/docs/integrations/tools/semanticscholar/", "Databricks Unity Catalog (UC)": "https://python.langchain.com/v0.2/docs/integrations/tools/databricks/", "Passio NutritionAI": "https://python.langchain.com/v0.2/docs/integrations/tools/passio_nutrition_ai/", "Memorize": "https://python.langchain.com/v0.2/docs/integrations/tools/memorize/", "Riza Code Interpreter": "https://python.langchain.com/v0.2/docs/integrations/tools/riza/", "ArXiv": "https://python.langchain.com/v0.2/docs/integrations/tools/arxiv/", "Robocorp Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/robocorp/", "MultiOn Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/multion/", "Exa Search": "https://python.langchain.com/v0.2/docs/integrations/tools/exa_search/", "Amadeus Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/amadeus/", "Reddit Search ": "https://python.langchain.com/v0.2/docs/integrations/tools/reddit_search/", "You.com Search": "https://python.langchain.com/v0.2/docs/integrations/tools/you/", "Bing Search": "https://python.langchain.com/v0.2/docs/integrations/tools/bing_search/", "Azure AI Services Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/azure_ai_services/", "Polygon IO Toolkit and Tools": "https://python.langchain.com/v0.2/docs/integrations/tools/polygon/", "Ionic Shopping Tool": "https://python.langchain.com/v0.2/docs/integrations/tools/ionic_shopping/", "MLX": "https://python.langchain.com/v0.2/docs/integrations/chat/mlx/", "ZHIPU AI": "https://python.langchain.com/v0.2/docs/integrations/chat/zhipuai/", "LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/callbacks/llmonitor/", "Streamlit": "https://python.langchain.com/v0.2/docs/integrations/callbacks/streamlit/", "Cohere": "https://python.langchain.com/v0.2/docs/integrations/providers/cohere/", "Dataherald": "https://python.langchain.com/v0.2/docs/integrations/providers/dataherald/", "Log, Trace, and Monitor": "https://python.langchain.com/v0.2/docs/integrations/providers/portkey/logging_tracing_portkey/", "Portkey": "https://python.langchain.com/v0.2/docs/integrations/providers/portkey/index/", "Bittensor": "https://python.langchain.com/v0.2/docs/integrations/llms/bittensor/"}, "AIMessage": {"Build an Agent with AgentExecutor (Legacy)": "https://python.langchain.com/v0.2/docs/how_to/agent_executor/", "How to add a semantic layer over graph database": "https://python.langchain.com/v0.2/docs/how_to/graph_semantic/", "How to merge consecutive messages of the same type": "https://python.langchain.com/v0.2/docs/how_to/merge_message_runs/", "How to trim messages": "https://python.langchain.com/v0.2/docs/how_to/trim_messages/", "How to do tool/function calling": "https://python.langchain.com/v0.2/docs/how_to/function_calling/", "How to add a human-in-the-loop for tools": "https://python.langchain.com/v0.2/docs/how_to/tools_human/", "How to use prompting alone (no tool calling) to do extraction": "https://python.langchain.com/v0.2/docs/how_to/extraction_parse/", "How to use reference examples when 
doing extraction": "https://python.langchain.com/v0.2/docs/how_to/extraction_examples/", "How to create a custom chat model class": "https://python.langchain.com/v0.2/docs/how_to/custom_chat_model/", "How to filter messages": "https://python.langchain.com/v0.2/docs/how_to/filter_messages/", "How to handle tool errors": "https://python.langchain.com/v0.2/docs/how_to/tools_error/", "How to add tools to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_tools/", "How to add chat history": "https://python.langchain.com/v0.2/docs/how_to/qa_chat_history_how_to/", "How to add retrieval to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_retrieval/", "How to create a custom Output Parser": "https://python.langchain.com/v0.2/docs/how_to/output_parser_custom/", "How to return structured data from a model": "https://python.langchain.com/v0.2/docs/how_to/structured_output/", "How to compose prompts together": "https://python.langchain.com/v0.2/docs/how_to/prompts_composition/", "How to use few-shot prompting with tool calling": "https://python.langchain.com/v0.2/docs/how_to/tools_few_shot/", "How to add examples to the prompt for query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_few_shot/", "Twitter (via Apify)": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/twitter/", "Zep Open Source": "https://python.langchain.com/v0.2/docs/integrations/retrievers/zep_memorystore/", "Zep Cloud": "https://python.langchain.com/v0.2/docs/integrations/retrievers/zep_cloud_memorystore/", "Google Imagen": "https://python.langchain.com/v0.2/docs/integrations/tools/google_imagen/", "Zep Open Source Memory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory/", "ZepCloudChatMessageHistory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_cloud_chat_message_history/", "Zep Cloud Memory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory_cloud/", "ChatOllama": "https://python.langchain.com/v0.2/docs/integrations/chat/ollama/", "ChatOCIGenAI": "https://python.langchain.com/v0.2/docs/integrations/chat/oci_generative_ai/", "ZHIPU AI": "https://python.langchain.com/v0.2/docs/integrations/chat/zhipuai/", "Yuan2.0": "https://python.langchain.com/v0.2/docs/integrations/chat/yuan2/", "Google Cloud Vertex AI": "https://python.langchain.com/v0.2/docs/integrations/llms/google_vertex_ai_palm/", "ChatGLM": "https://python.langchain.com/v0.2/docs/integrations/llms/chatglm/", "Chat Bot Feedback Template": "https://python.langchain.com/v0.2/docs/templates/chat-bot-feedback/", "Build a Chatbot": "https://python.langchain.com/v0.2/docs/tutorials/chatbot/", "Conversational RAG": "https://python.langchain.com/v0.2/docs/tutorials/qa_chat_history/"}, "ChatMessageHistory": {"Build an Agent with AgentExecutor (Legacy)": "https://python.langchain.com/v0.2/docs/how_to/agent_executor/", "How to add tools to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_tools/", "How to add chat history": "https://python.langchain.com/v0.2/docs/how_to/qa_chat_history_how_to/", "How to add memory to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_memory/", "Conversational RAG": "https://python.langchain.com/v0.2/docs/tutorials/qa_chat_history/"}, "Neo4jGraph": {"How to add a semantic layer over graph database": "https://python.langchain.com/v0.2/docs/how_to/graph_semantic/", "How to map values to a graph database": "https://python.langchain.com/v0.2/docs/how_to/graph_mapping/", "How to construct 
knowledge graphs": "https://python.langchain.com/v0.2/docs/how_to/graph_constructing/", "How to best prompt for Graph-RAG": "https://python.langchain.com/v0.2/docs/how_to/graph_prompting/", "Neo4j": "https://python.langchain.com/v0.2/docs/integrations/graphs/neo4j_cypher/", "Diffbot": "https://python.langchain.com/v0.2/docs/integrations/graphs/diffbot/", "Build a Question Answering application over a Graph Database": "https://python.langchain.com/v0.2/docs/tutorials/graph/"}, "AsyncCallbackManagerForToolRun": {"How to add a semantic layer over graph database": "https://python.langchain.com/v0.2/docs/how_to/graph_semantic/", "How to create tools": "https://python.langchain.com/v0.2/docs/how_to/custom_tools/"}, "CallbackManagerForToolRun": {"How to add a semantic layer over graph database": "https://python.langchain.com/v0.2/docs/how_to/graph_semantic/", "How to create tools": "https://python.langchain.com/v0.2/docs/how_to/custom_tools/"}, "BaseTool": {"How to add a semantic layer over graph database": "https://python.langchain.com/v0.2/docs/how_to/graph_semantic/", "How to use LangChain with different Pydantic versions": "https://python.langchain.com/v0.2/docs/how_to/pydantic_compatibility/", "How to pass run time values to tools": "https://python.langchain.com/v0.2/docs/how_to/tool_runtime/", "How to create tools": "https://python.langchain.com/v0.2/docs/how_to/custom_tools/", "How to return artifacts from a tool": "https://python.langchain.com/v0.2/docs/how_to/tool_artifacts/"}, "format_to_openai_function_messages": {"How to add a semantic layer over graph database": "https://python.langchain.com/v0.2/docs/how_to/graph_semantic/"}, "OpenAIFunctionsAgentOutputParser": {"How to add a semantic layer over graph database": "https://python.langchain.com/v0.2/docs/how_to/graph_semantic/"}, "convert_to_openai_function": {"How to add a semantic layer over graph database": "https://python.langchain.com/v0.2/docs/how_to/graph_semantic/", "How to convert tools to OpenAI Functions": "https://python.langchain.com/v0.2/docs/how_to/tools_as_openai_functions/"}, "BSHTMLLoader": {"How to handle long text when doing extraction": "https://python.langchain.com/v0.2/docs/how_to/extraction_long_text/", "How to load HTML": "https://python.langchain.com/v0.2/docs/how_to/document_loader_html/", "BSHTMLLoader": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/bshtml/"}, "TokenTextSplitter": {"How to handle long text when doing extraction": "https://python.langchain.com/v0.2/docs/how_to/extraction_long_text/", "How to split text by tokens ": "https://python.langchain.com/v0.2/docs/how_to/split_by_token/", "AzureAISearchRetriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/azure_ai_search/", "Apache Doris": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/apache_doris/", "StarRocks": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/starrocks/"}, "PyPDFLoader": {"How to load PDFs": "https://python.langchain.com/v0.2/docs/how_to/document_loader_pdf/", "Google Vertex AI Vector Search": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_vertex_ai_vector_search/", "Azure Cosmos DB No SQL": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azure_cosmos_db_no_sql/", "Apache Cassandra": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/cassandra/", "KDB.AI": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/kdbai/", "PyPDFLoader": 
"https://python.langchain.com/v0.2/docs/integrations/document_loaders/pypdfloader/", "Merge Documents Loader": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/merge_doc/", "Google Cloud Storage File": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_cloud_storage_file/", "Build a PDF ingestion and Question/Answering system": "https://python.langchain.com/v0.2/docs/tutorials/pdf_qa/"}, "SQLDatabase": {"How to better prompt when doing SQL question-answering": "https://python.langchain.com/v0.2/docs/how_to/sql_prompting/", "How to deal with large databases when doing SQL question-answering": "https://python.langchain.com/v0.2/docs/how_to/sql_large_db/", "How to do question answering over CSVs": "https://python.langchain.com/v0.2/docs/how_to/sql_csv/", "How to do query validation as part of SQL question-answering": "https://python.langchain.com/v0.2/docs/how_to/sql_query_checking/", "SQLDatabase Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/sql_database/", "CnosDB": "https://python.langchain.com/v0.2/docs/integrations/providers/cnosdb/", "Rebuff": "https://python.langchain.com/v0.2/docs/integrations/providers/rebuff/", "Build a Question/Answering system over SQL data": "https://python.langchain.com/v0.2/docs/tutorials/sql_qa/"}, "create_sql_query_chain": {"How to better prompt when doing SQL question-answering": "https://python.langchain.com/v0.2/docs/how_to/sql_prompting/", "How to deal with large databases when doing SQL question-answering": "https://python.langchain.com/v0.2/docs/how_to/sql_large_db/", "How to do query validation as part of SQL question-answering": "https://python.langchain.com/v0.2/docs/how_to/sql_query_checking/", "Build a Question/Answering system over SQL data": "https://python.langchain.com/v0.2/docs/tutorials/sql_qa/"}, "FewShotPromptTemplate": {"How to better prompt when doing SQL question-answering": "https://python.langchain.com/v0.2/docs/how_to/sql_prompting/", "How to select examples by n-gram overlap": "https://python.langchain.com/v0.2/docs/how_to/example_selectors_ngram/", "How to select examples by length": "https://python.langchain.com/v0.2/docs/how_to/example_selectors_length_based/", "How to use example selectors": "https://python.langchain.com/v0.2/docs/how_to/example_selectors/", "How to use few shot examples": "https://python.langchain.com/v0.2/docs/how_to/few_shot_examples/", "How to select examples by similarity": "https://python.langchain.com/v0.2/docs/how_to/example_selectors_similarity/", "How to best prompt for Graph-RAG": "https://python.langchain.com/v0.2/docs/how_to/graph_prompting/", "How to select examples by maximal marginal relevance (MMR)": "https://python.langchain.com/v0.2/docs/how_to/example_selectors_mmr/", "Generate Synthetic Data": "https://python.langchain.com/v0.2/docs/tutorials/data_generation/"}, "SemanticSimilarityExampleSelector": {"How to better prompt when doing SQL question-answering": "https://python.langchain.com/v0.2/docs/how_to/sql_prompting/", "How to use few shot examples": "https://python.langchain.com/v0.2/docs/how_to/few_shot_examples/", "How to use few shot examples in chat models": "https://python.langchain.com/v0.2/docs/how_to/few_shot_examples_chat/", "How to select examples by similarity": "https://python.langchain.com/v0.2/docs/how_to/example_selectors_similarity/", "How to best prompt for Graph-RAG": "https://python.langchain.com/v0.2/docs/how_to/graph_prompting/", "How to select examples by maximal marginal relevance (MMR)": 
"https://python.langchain.com/v0.2/docs/how_to/example_selectors_mmr/"}, "PydanticOutputParser": {"How to use output parsers to parse an LLM response into structured format": "https://python.langchain.com/v0.2/docs/how_to/output_parser_structured/", "How to use prompting alone (no tool calling) to do extraction": "https://python.langchain.com/v0.2/docs/how_to/extraction_parse/", "How to use the output-fixing parser": "https://python.langchain.com/v0.2/docs/how_to/output_parser_fixing/", "How to retry when a parsing error occurs": "https://python.langchain.com/v0.2/docs/how_to/output_parser_retry/", "How to return structured data from a model": "https://python.langchain.com/v0.2/docs/how_to/structured_output/", "Generate Synthetic Data": "https://python.langchain.com/v0.2/docs/tutorials/data_generation/"}, "AsyncCallbackHandler": {"How to use callbacks in async environments": "https://python.langchain.com/v0.2/docs/how_to/callbacks_async/", "How to dispatch custom callback events": "https://python.langchain.com/v0.2/docs/how_to/callbacks_custom_events/", "Bedrock": "https://python.langchain.com/v0.2/docs/integrations/llms/bedrock/"}, "BaseCallbackHandler": {"How to use callbacks in async environments": "https://python.langchain.com/v0.2/docs/how_to/callbacks_async/", "How to propagate callbacks constructor": "https://python.langchain.com/v0.2/docs/how_to/callbacks_constructor/", "How to attach callbacks to a runnable": "https://python.langchain.com/v0.2/docs/how_to/callbacks_attach/", "How to create custom callback handlers": "https://python.langchain.com/v0.2/docs/how_to/custom_callbacks/", "How to dispatch custom callback events": "https://python.langchain.com/v0.2/docs/how_to/callbacks_custom_events/", "How to pass callbacks in at runtime": "https://python.langchain.com/v0.2/docs/how_to/callbacks_runtime/", "GPT4All": "https://python.langchain.com/v0.2/docs/integrations/llms/gpt4all/"}, "LLMResult": {"How to use callbacks in async environments": "https://python.langchain.com/v0.2/docs/how_to/callbacks_async/", "How to propagate callbacks constructor": "https://python.langchain.com/v0.2/docs/how_to/callbacks_constructor/", "How to attach callbacks to a runnable": "https://python.langchain.com/v0.2/docs/how_to/callbacks_attach/", "How to pass callbacks in at runtime": "https://python.langchain.com/v0.2/docs/how_to/callbacks_runtime/", "Google Cloud Vertex AI": "https://python.langchain.com/v0.2/docs/integrations/llms/google_vertex_ai_palm/"}, "RunnableParallel": {"How to add values to a chain's state": "https://python.langchain.com/v0.2/docs/how_to/assign/", "How to invoke runnables in parallel": "https://python.langchain.com/v0.2/docs/how_to/parallel/", "How to pass through arguments from one step to the next": "https://python.langchain.com/v0.2/docs/how_to/passthrough/", "How to add message history": "https://python.langchain.com/v0.2/docs/how_to/message_history/", "How to retry when a parsing error occurs": "https://python.langchain.com/v0.2/docs/how_to/output_parser_retry/", "How to get a RAG application to add citations": "https://python.langchain.com/v0.2/docs/how_to/qa_citations/", "LangChain Expression Language Cheatsheet": "https://python.langchain.com/v0.2/docs/how_to/lcel_cheatsheet/", "How to chain runnables": "https://python.langchain.com/v0.2/docs/how_to/sequence/", "ZepCloudChatMessageHistory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_cloud_chat_message_history/", "Google Cloud Vertex AI Reranker": 
"https://python.langchain.com/v0.2/docs/integrations/document_transformers/google_cloud_vertexai_rerank/"}, "RunnableBranch": {"How to route between sub-chains": "https://python.langchain.com/v0.2/docs/how_to/routing/", "How to add retrieval to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_retrieval/"}, "cosine_similarity": {"How to route between sub-chains": "https://python.langchain.com/v0.2/docs/how_to/routing/"}, "ConfigurableField": {"How to do per-user retrieval": "https://python.langchain.com/v0.2/docs/how_to/qa_per_user/", "How to configure runtime chain internals": "https://python.langchain.com/v0.2/docs/how_to/configure/", "LangChain Expression Language Cheatsheet": "https://python.langchain.com/v0.2/docs/how_to/lcel_cheatsheet/", "How to combine results from multiple retrievers": "https://python.langchain.com/v0.2/docs/how_to/ensemble_retriever/", "Hybrid Search": "https://python.langchain.com/v0.2/docs/how_to/hybrid/"}, "NGramOverlapExampleSelector": {"How to select examples by n-gram overlap": "https://python.langchain.com/v0.2/docs/how_to/example_selectors_ngram/"}, "get_openai_callback": {"How to track token usage in ChatModels": "https://python.langchain.com/v0.2/docs/how_to/chat_token_usage_tracking/", "How to run custom functions": "https://python.langchain.com/v0.2/docs/how_to/functions/", "How to track token usage for LLMs": "https://python.langchain.com/v0.2/docs/how_to/llm_token_usage_tracking/", "AzureChatOpenAI": "https://python.langchain.com/v0.2/docs/integrations/chat/azure_chat_openai/"}, "load_tools": {"How to track token usage in ChatModels": "https://python.langchain.com/v0.2/docs/how_to/chat_token_usage_tracking/", "Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "ChatGPT Plugins": "https://python.langchain.com/v0.2/docs/integrations/tools/chatgpt_plugins/", "Google Jobs": "https://python.langchain.com/v0.2/docs/integrations/tools/google_jobs/", "Human as a tool": "https://python.langchain.com/v0.2/docs/integrations/tools/human_tools/", "Google Finance": "https://python.langchain.com/v0.2/docs/integrations/tools/google_finance/", "AWS Lambda": "https://python.langchain.com/v0.2/docs/integrations/tools/awslambda/", "Google Drive": "https://python.langchain.com/v0.2/docs/integrations/tools/google_drive/", "OpenWeatherMap": "https://python.langchain.com/v0.2/docs/integrations/providers/openweathermap/", "Memorize": "https://python.langchain.com/v0.2/docs/integrations/tools/memorize/", "Eleven Labs Text2Speech": "https://python.langchain.com/v0.2/docs/integrations/tools/eleven_labs_tts/", "ArXiv": "https://python.langchain.com/v0.2/docs/integrations/tools/arxiv/", "GraphQL": "https://python.langchain.com/v0.2/docs/integrations/tools/graphql/", "SceneXplain": "https://python.langchain.com/v0.2/docs/integrations/tools/sceneXplain/", "Dall-E Image Generator": "https://python.langchain.com/v0.2/docs/integrations/tools/dalle_image_generator/", "MLX": "https://python.langchain.com/v0.2/docs/integrations/chat/mlx/", "LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/callbacks/llmonitor/", "SageMaker Tracking": "https://python.langchain.com/v0.2/docs/integrations/callbacks/sagemaker_tracking/", "Comet Tracing": "https://python.langchain.com/v0.2/docs/integrations/callbacks/comet_tracing/", "Argilla": "https://python.langchain.com/v0.2/docs/integrations/callbacks/argilla/", "Streamlit": "https://python.langchain.com/v0.2/docs/integrations/callbacks/streamlit/", "SerpAPI": 
"https://python.langchain.com/v0.2/docs/integrations/providers/serpapi/", "SearchApi": "https://python.langchain.com/v0.2/docs/integrations/providers/searchapi/", "Comet": "https://python.langchain.com/v0.2/docs/integrations/providers/comet_tracking/", "Aim": "https://python.langchain.com/v0.2/docs/integrations/providers/aim_tracking/", "Golden": "https://python.langchain.com/v0.2/docs/integrations/providers/golden/", "Weights & Biases": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracking/", "Wolfram Alpha": "https://python.langchain.com/v0.2/docs/integrations/providers/wolfram_alpha/", "MLflow": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_tracking/", "DataForSEO": "https://python.langchain.com/v0.2/docs/integrations/providers/dataforseo/", "SearxNG Search API": "https://python.langchain.com/v0.2/docs/integrations/providers/searx/", "Serper - Google Search API": "https://python.langchain.com/v0.2/docs/integrations/providers/google_serper/", "Stack Exchange": "https://python.langchain.com/v0.2/docs/integrations/providers/stackexchange/", "Flyte": "https://python.langchain.com/v0.2/docs/integrations/providers/flyte/", "WandB Tracing": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracing/", "ClearML": "https://python.langchain.com/v0.2/docs/integrations/providers/clearml_tracking/", "Dataherald": "https://python.langchain.com/v0.2/docs/integrations/providers/dataherald/", "Amazon API Gateway": "https://python.langchain.com/v0.2/docs/integrations/llms/amazon_api_gateway/"}, "ChatBedrock": {"How to track token usage in ChatModels": "https://python.langchain.com/v0.2/docs/how_to/chat_token_usage_tracking/", "Response metadata": "https://python.langchain.com/v0.2/docs/how_to/response_metadata/", "AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "ChatBedrock": "https://python.langchain.com/v0.2/docs/integrations/chat/bedrock/", "Amazon Neptune with SPARQL": "https://python.langchain.com/v0.2/docs/integrations/graphs/amazon_neptune_sparql/"}, "get_bedrock_anthropic_callback": {"How to track token usage in ChatModels": "https://python.langchain.com/v0.2/docs/how_to/chat_token_usage_tracking/"}, "CallbackManagerForLLMRun": {"How to create a custom LLM class": "https://python.langchain.com/v0.2/docs/how_to/custom_llm/", "How to create a custom chat model class": "https://python.langchain.com/v0.2/docs/how_to/custom_chat_model/"}, "LLM": {"How to create a custom LLM class": "https://python.langchain.com/v0.2/docs/how_to/custom_llm/"}, "GenerationChunk": {"How to create a custom LLM class": "https://python.langchain.com/v0.2/docs/how_to/custom_llm/"}, "BaseLoader": {"How to create a custom Document Loader": "https://python.langchain.com/v0.2/docs/how_to/document_loader_custom/", "How to use the LangChain indexing API": "https://python.langchain.com/v0.2/docs/how_to/indexing/"}, "BaseBlobParser": {"How to create a custom Document Loader": "https://python.langchain.com/v0.2/docs/how_to/document_loader_custom/"}, "Blob": {"How to create a custom Document Loader": "https://python.langchain.com/v0.2/docs/how_to/document_loader_custom/", "Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "Google Cloud Document AI": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/google_docai/"}, "FileSystemBlobLoader": {"How to create a custom Document Loader": "https://python.langchain.com/v0.2/docs/how_to/document_loader_custom/"}, "GenericLoader": {"How to create 
a custom Document Loader": "https://python.langchain.com/v0.2/docs/how_to/document_loader_custom/", "Grobid": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/grobid/", "YouTube audio": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/youtube_audio/", "Source Code": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/source_code/"}, "LengthBasedExampleSelector": {"How to select examples by length": "https://python.langchain.com/v0.2/docs/how_to/example_selectors_length_based/"}, "BaseExampleSelector": {"How to use example selectors": "https://python.langchain.com/v0.2/docs/how_to/example_selectors/"}, "Language": {"How to split code": "https://python.langchain.com/v0.2/docs/how_to/code_splitter/", "Source Code": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/source_code/"}, "Chroma": {"How to use few shot examples": "https://python.langchain.com/v0.2/docs/how_to/few_shot_examples/", "How to handle cases where no queries are generated": "https://python.langchain.com/v0.2/docs/how_to/query_no_queries/", "How to use few shot examples in chat models": "https://python.langchain.com/v0.2/docs/how_to/few_shot_examples_chat/", "How to select examples by similarity": "https://python.langchain.com/v0.2/docs/how_to/example_selectors_similarity/", "How to reorder retrieved results to mitigate the \"lost in the middle\" effect": "https://python.langchain.com/v0.2/docs/how_to/long_context_reorder/", "How to handle multiple queries when doing query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_multiple_queries/", "How to stream results from your RAG application": "https://python.langchain.com/v0.2/docs/how_to/qa_streaming/", "How to get your RAG application to return sources": "https://python.langchain.com/v0.2/docs/how_to/qa_sources/", "How to retrieve using multiple vectors per document": "https://python.langchain.com/v0.2/docs/how_to/multi_vector/", "How to add chat history": "https://python.langchain.com/v0.2/docs/how_to/qa_chat_history_how_to/", "How to add retrieval to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_retrieval/", "How to handle multiple retrievers when doing query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_multiple_retrievers/", "How to create and query vector stores": "https://python.langchain.com/v0.2/docs/how_to/vectorstores/", "How to use the MultiQueryRetriever": "https://python.langchain.com/v0.2/docs/how_to/MultiQueryRetriever/", "How to use the Parent Document Retriever": "https://python.langchain.com/v0.2/docs/how_to/parent_document_retriever/", "How deal with high cardinality categoricals when doing query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_high_cardinality/", "How to do \"self-querying\" retrieval": "https://python.langchain.com/v0.2/docs/how_to/self_query/", "RePhraseQuery": "https://python.langchain.com/v0.2/docs/integrations/retrievers/re_phrase/", "LOTR (Merger Retriever)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/merger_retriever/", "Chroma": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/chroma/", "Confident": "https://python.langchain.com/v0.2/docs/integrations/callbacks/confident/", "Google Cloud Vertex AI Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/google_cloud_vertexai_rerank/", "Image captions": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/image_captions/", "Psychic": 
"https://python.langchain.com/v0.2/docs/integrations/document_loaders/psychic/", "Docugami": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/docugami/", "Build a Retrieval Augmented Generation (RAG) App": "https://python.langchain.com/v0.2/docs/tutorials/rag/", "Build a Local RAG Application": "https://python.langchain.com/v0.2/docs/tutorials/local_rag/", "Conversational RAG": "https://python.langchain.com/v0.2/docs/tutorials/qa_chat_history/", "Build a Query Analysis System": "https://python.langchain.com/v0.2/docs/tutorials/query_analysis/", "Build a PDF ingestion and Question/Answering system": "https://python.langchain.com/v0.2/docs/tutorials/pdf_qa/", "Vector stores and retrievers": "https://python.langchain.com/v0.2/docs/tutorials/retrievers/"}, "merge_message_runs": {"How to merge consecutive messages of the same type": "https://python.langchain.com/v0.2/docs/how_to/merge_message_runs/"}, "PydanticToolsParser": {"How to handle cases where no queries are generated": "https://python.langchain.com/v0.2/docs/how_to/query_no_queries/", "How to do tool/function calling": "https://python.langchain.com/v0.2/docs/how_to/function_calling/", "How to deal with large databases when doing SQL question-answering": "https://python.langchain.com/v0.2/docs/how_to/sql_large_db/", "How to handle multiple queries when doing query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_multiple_queries/", "How to use chat models to call tools": "https://python.langchain.com/v0.2/docs/how_to/tool_calling/", "How to handle multiple retrievers when doing query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_multiple_retrievers/", "ChatPremAI": "https://python.langchain.com/v0.2/docs/integrations/chat/premai/", "PremAI": "https://python.langchain.com/v0.2/docs/integrations/providers/premai/"}, "chain": {"How to handle cases where no queries are generated": "https://python.langchain.com/v0.2/docs/how_to/query_no_queries/", "How to pass run time values to tools": "https://python.langchain.com/v0.2/docs/how_to/tool_runtime/", "How to handle multiple queries when doing query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_multiple_queries/", "How to create a dynamic (self-constructing) chain": "https://python.langchain.com/v0.2/docs/how_to/dynamic_chain/", "How to stream runnables": "https://python.langchain.com/v0.2/docs/how_to/streaming/", "How to handle multiple retrievers when doing query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_multiple_retrievers/", "How to run custom functions": "https://python.langchain.com/v0.2/docs/how_to/functions/", "How to add scores to retriever results": "https://python.langchain.com/v0.2/docs/how_to/add_scores_retriever/", "Tavily Search": "https://python.langchain.com/v0.2/docs/integrations/tools/tavily_search/"}, "trim_messages": {"How to trim messages": "https://python.langchain.com/v0.2/docs/how_to/trim_messages/", "How to add memory to chatbots": "https://python.langchain.com/v0.2/docs/how_to/chatbots_memory/", "Build a Chatbot": "https://python.langchain.com/v0.2/docs/tutorials/chatbot/"}, "ToolMessage": {"How to trim messages": "https://python.langchain.com/v0.2/docs/how_to/trim_messages/", "How to do tool/function calling": "https://python.langchain.com/v0.2/docs/how_to/function_calling/", "How to use reference examples when doing extraction": "https://python.langchain.com/v0.2/docs/how_to/extraction_examples/", "How to do question answering over CSVs": 
"https://python.langchain.com/v0.2/docs/how_to/sql_csv/", "How to create a custom chat model class": "https://python.langchain.com/v0.2/docs/how_to/custom_chat_model/", "How to handle tool errors": "https://python.langchain.com/v0.2/docs/how_to/tools_error/", "How to return structured data from a model": "https://python.langchain.com/v0.2/docs/how_to/structured_output/", "How to use few-shot prompting with tool calling": "https://python.langchain.com/v0.2/docs/how_to/tools_few_shot/", "How to add examples to the prompt for query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_few_shot/", "Cohere": "https://python.langchain.com/v0.2/docs/integrations/providers/cohere/", "Eden AI": "https://python.langchain.com/v0.2/docs/integrations/chat/edenai/", "ChatPremAI": "https://python.langchain.com/v0.2/docs/integrations/chat/premai/", "PremAI": "https://python.langchain.com/v0.2/docs/integrations/providers/premai/"}, "RecursiveJsonSplitter": {"How to split JSON data": "https://python.langchain.com/v0.2/docs/how_to/recursive_json_splitter/"}, "FewShotChatMessagePromptTemplate": {"How to use few shot examples in chat models": "https://python.langchain.com/v0.2/docs/how_to/few_shot_examples_chat/", "Fiddler": "https://python.langchain.com/v0.2/docs/integrations/callbacks/fiddler/"}, "XMLOutputParser": {"How to parse XML output": "https://python.langchain.com/v0.2/docs/how_to/output_parser_xml/", "How to get a RAG application to add citations": "https://python.langchain.com/v0.2/docs/how_to/qa_citations/"}, "InjectedToolArg": {"How to pass run time values to tools": "https://python.langchain.com/v0.2/docs/how_to/tool_runtime/"}, "Runnable": {"How to add a human-in-the-loop for tools": "https://python.langchain.com/v0.2/docs/how_to/tools_human/", "How to handle tool errors": "https://python.langchain.com/v0.2/docs/how_to/tools_error/", "How to create a dynamic (self-constructing) chain": "https://python.langchain.com/v0.2/docs/how_to/dynamic_chain/"}, "StructuredTool": {"How to create tools": "https://python.langchain.com/v0.2/docs/how_to/custom_tools/", "Infobip": "https://python.langchain.com/v0.2/docs/integrations/tools/infobip/"}, "GenericFakeChatModel": {"How to create tools": "https://python.langchain.com/v0.2/docs/how_to/custom_tools/"}, "ToolException": {"How to create tools": "https://python.langchain.com/v0.2/docs/how_to/custom_tools/"}, "AzureAIDocumentIntelligenceLoader": {"How to load Microsoft Office files": "https://python.langchain.com/v0.2/docs/how_to/document_loader_office_file/", "Microsoft Word": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/microsoft_word/", "Microsoft Excel": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/microsoft_excel/", "Microsoft PowerPoint": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/microsoft_powerpoint/", "Azure AI Document Intelligence": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/azure_document_intelligence/"}, "InMemoryRateLimiter": {"How to handle rate limits": "https://python.langchain.com/v0.2/docs/how_to/chat_model_rate_limiting/"}, "LongContextReorder": {"How to reorder retrieved results to mitigate the \"lost in the middle\" effect": "https://python.langchain.com/v0.2/docs/how_to/long_context_reorder/", "LOTR (Merger Retriever)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/merger_retriever/"}, "DatetimeOutputParser": {"How to add fallbacks to a runnable": 
"https://python.langchain.com/v0.2/docs/how_to/fallbacks/"}, "CypherQueryCorrector": {"How to map values to a graph database": "https://python.langchain.com/v0.2/docs/how_to/graph_mapping/"}, "Schema": {"How to map values to a graph database": "https://python.langchain.com/v0.2/docs/how_to/graph_mapping/"}, "dumpd": {"How to save and load LangChain objects": "https://python.langchain.com/v0.2/docs/how_to/serialization/"}, "dumps": {"How to save and load LangChain objects": "https://python.langchain.com/v0.2/docs/how_to/serialization/"}, "load": {"How to save and load LangChain objects": "https://python.langchain.com/v0.2/docs/how_to/serialization/"}, "loads": {"How to save and load LangChain objects": "https://python.langchain.com/v0.2/docs/how_to/serialization/"}, "set_llm_cache": {"How to cache chat model responses": "https://python.langchain.com/v0.2/docs/how_to/chat_model_caching/", "How to cache LLM responses": "https://python.langchain.com/v0.2/docs/how_to/llm_caching/", "Model caches": "https://python.langchain.com/v0.2/docs/integrations/llm_caching/", "DSPy": "https://python.langchain.com/v0.2/docs/integrations/providers/dspy/", "MongoDB Atlas": "https://python.langchain.com/v0.2/docs/integrations/providers/mongodb_atlas/", "Astra DB": "https://python.langchain.com/v0.2/docs/integrations/providers/astradb/", "Couchbase": "https://python.langchain.com/v0.2/docs/integrations/providers/couchbase/", "Redis": "https://python.langchain.com/v0.2/docs/integrations/providers/redis/", "Cassandra": "https://python.langchain.com/v0.2/docs/integrations/providers/cassandra/", "Momento": "https://python.langchain.com/v0.2/docs/integrations/providers/momento/"}, "InMemoryCache": {"How to cache chat model responses": "https://python.langchain.com/v0.2/docs/how_to/chat_model_caching/", "How to cache LLM responses": "https://python.langchain.com/v0.2/docs/how_to/llm_caching/", "Model caches": "https://python.langchain.com/v0.2/docs/integrations/llm_caching/"}, "SQLiteCache": {"How to cache chat model responses": "https://python.langchain.com/v0.2/docs/how_to/chat_model_caching/", "How to cache LLM responses": "https://python.langchain.com/v0.2/docs/how_to/llm_caching/", "Model caches": "https://python.langchain.com/v0.2/docs/integrations/llm_caching/", "DSPy": "https://python.langchain.com/v0.2/docs/integrations/providers/dspy/"}, "create_sql_agent": {"How to do question answering over CSVs": "https://python.langchain.com/v0.2/docs/how_to/sql_csv/", "CnosDB": "https://python.langchain.com/v0.2/docs/integrations/providers/cnosdb/"}, "PythonAstREPLTool": {"How to do question answering over CSVs": "https://python.langchain.com/v0.2/docs/how_to/sql_csv/"}, "JsonOutputKeyToolsParser": {"How to do question answering over CSVs": "https://python.langchain.com/v0.2/docs/how_to/sql_csv/"}, "create_pandas_dataframe_agent": {"How to do question answering over CSVs": "https://python.langchain.com/v0.2/docs/how_to/sql_csv/", "Pandas Dataframe": "https://python.langchain.com/v0.2/docs/integrations/tools/pandas/"}, "OutputFixingParser": {"How to use the output-fixing parser": "https://python.langchain.com/v0.2/docs/how_to/output_parser_fixing/", "How to retry when a parsing error occurs": "https://python.langchain.com/v0.2/docs/how_to/output_parser_retry/"}, "FunctionMessage": {"How to create a custom chat model class": "https://python.langchain.com/v0.2/docs/how_to/custom_chat_model/"}, "AIMessageChunk": {"How to create a custom chat model class": "https://python.langchain.com/v0.2/docs/how_to/custom_chat_model/", 
"How to create a custom Output Parser": "https://python.langchain.com/v0.2/docs/how_to/output_parser_custom/", "Google Cloud Vertex AI": "https://python.langchain.com/v0.2/docs/integrations/llms/google_vertex_ai_palm/"}, "FunctionMessageChunk": {"How to create a custom chat model class": "https://python.langchain.com/v0.2/docs/how_to/custom_chat_model/"}, "HumanMessageChunk": {"How to create a custom chat model class": "https://python.langchain.com/v0.2/docs/how_to/custom_chat_model/"}, "SystemMessageChunk": {"How to create a custom chat model class": "https://python.langchain.com/v0.2/docs/how_to/custom_chat_model/"}, "ToolMessageChunk": {"How to create a custom chat model class": "https://python.langchain.com/v0.2/docs/how_to/custom_chat_model/"}, "AsyncCallbackManagerForLLMRun": {"How to create a custom chat model class": "https://python.langchain.com/v0.2/docs/how_to/custom_chat_model/"}, "SimpleChatModel": {"How to create a custom chat model class": "https://python.langchain.com/v0.2/docs/how_to/custom_chat_model/"}, "ChatGeneration": {"How to create a custom chat model class": "https://python.langchain.com/v0.2/docs/how_to/custom_chat_model/", "How to create a custom Output Parser": "https://python.langchain.com/v0.2/docs/how_to/output_parser_custom/"}, "ChatGenerationChunk": {"How to create a custom chat model class": "https://python.langchain.com/v0.2/docs/how_to/custom_chat_model/"}, "ChatResult": {"How to create a custom chat model class": "https://python.langchain.com/v0.2/docs/how_to/custom_chat_model/"}, "run_in_executor": {"How to create a custom chat model class": "https://python.langchain.com/v0.2/docs/how_to/custom_chat_model/"}, "MoveFileTool": {"How to convert tools to OpenAI Functions": "https://python.langchain.com/v0.2/docs/how_to/tools_as_openai_functions/"}, "filter_messages": {"How to filter messages": "https://python.langchain.com/v0.2/docs/how_to/filter_messages/"}, "ToolCall": {"How to handle tool errors": "https://python.langchain.com/v0.2/docs/how_to/tools_error/"}, "SQLRecordManager": {"How to use the LangChain indexing API": "https://python.langchain.com/v0.2/docs/how_to/indexing/"}, "index": {"How to use the LangChain indexing API": "https://python.langchain.com/v0.2/docs/how_to/indexing/"}, "SemanticChunker": {"How to split text based on semantic similarity": "https://python.langchain.com/v0.2/docs/how_to/semantic-chunker/"}, "InMemoryVectorStore": {"How to convert Runnables as Tools": "https://python.langchain.com/v0.2/docs/how_to/convert_runnable_to_tool/", "FireworksEmbeddings": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/fireworks/", "OpenAIEmbeddings": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/openai/", "OllamaEmbeddings": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/ollama/", "MistralAIEmbeddings": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/mistralai/", "AI21Embeddings": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/ai21/", "TogetherEmbeddings": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/together/", "CohereEmbeddings": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/cohere/", "AzureOpenAIEmbeddings": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/azureopenai/", "NomicEmbeddings": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/nomic/", "AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "Amazon MemoryDB": 
"https://python.langchain.com/v0.2/docs/integrations/vectorstores/memorydb/"}, "JsonOutputParser": {"How to stream runnables": "https://python.langchain.com/v0.2/docs/how_to/streaming/", "How to add ad-hoc tool calling capability to LLMs and Chat Models": "https://python.langchain.com/v0.2/docs/how_to/tools_prompting/", "How to parse JSON output": "https://python.langchain.com/v0.2/docs/how_to/output_parser_json/"}, "InMemoryByteStore": {"How to retrieve using multiple vectors per document": "https://python.langchain.com/v0.2/docs/how_to/multi_vector/", "Caching": "https://python.langchain.com/v0.2/docs/how_to/caching_embeddings/", "InMemoryByteStore": "https://python.langchain.com/v0.2/docs/integrations/stores/in_memory/"}, "TextLoader": {"How to retrieve using multiple vectors per document": "https://python.langchain.com/v0.2/docs/how_to/multi_vector/", "How to do retrieval with contextual compression": "https://python.langchain.com/v0.2/docs/how_to/contextual_compression/", "How to load documents from a directory": "https://python.langchain.com/v0.2/docs/how_to/document_loader_directory/", "How to create and query vector stores": "https://python.langchain.com/v0.2/docs/how_to/vectorstores/", "How to use the Parent Document Retriever": "https://python.langchain.com/v0.2/docs/how_to/parent_document_retriever/", "How to use a vectorstore as a retriever": "https://python.langchain.com/v0.2/docs/how_to/vectorstore_retriever/", "Caching": "https://python.langchain.com/v0.2/docs/how_to/caching_embeddings/", "AzureAISearchRetriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/azure_ai_search/", "Kinetica Vectorstore based Retriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/kinetica/", "JaguarDB Vector Database": "https://python.langchain.com/v0.2/docs/integrations/retrievers/jaguar/", "LLMLingua Document Compressor": "https://python.langchain.com/v0.2/docs/integrations/retrievers/llmlingua/", "Cohere reranker": "https://python.langchain.com/v0.2/docs/integrations/retrievers/cohere-reranker/", "SingleStoreDB": "https://python.langchain.com/v0.2/docs/integrations/retrievers/singlestoredb/", "FlashRank reranker": "https://python.langchain.com/v0.2/docs/integrations/retrievers/flashrank-reranker/", "Confident": "https://python.langchain.com/v0.2/docs/integrations/callbacks/confident/", "UpTrain": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "Upstash Vector": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/upstash/", "VDMS": "https://python.langchain.com/v0.2/docs/integrations/providers/vdms/", "Vectara Chat": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_chat/", "LanceDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/lancedb/", "Kinetica Vectorstore API": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/kinetica/", "SQLite-VSS": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sqlitevss/", "Vald": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vald/", "Weaviate": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/weaviate/", "Jaguar Vector Database": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/jaguar/", "SAP HANA Cloud Vector Engine": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sap_hanavector/", "DashVector": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/dashvector/", "Databricks Vector Search": 
"https://python.langchain.com/v0.2/docs/integrations/vectorstores/databricks_vector_search/", "ScaNN": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/scann/", "Xata": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/xata/", "Hippo": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/hippo/", "Vespa": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vespa/", "Rockset": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/rockset/", "DingoDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/dingo/", "Zilliz": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/zilliz/", "Azure Cosmos DB Mongo vCore": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azure_cosmos_db/", "viking DB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vikingdb/", "Annoy": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/annoy/", "Couchbase ": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/couchbase/", "Typesense": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/typesense/", "Momento Vector Index (MVI)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/momento_vector_index/", "TiDB Vector": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tidb_vector/", "Relyt": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/relyt/", "Atlas": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/atlas/", "Activeloop Deep Lake": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/activeloop_deeplake/", "vlite": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vlite/", "Neo4j Vector Index": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/neo4jvector/", "Lantern": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/lantern/", "Tair": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tair/", "DuckDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/duckdb/", "Alibaba Cloud OpenSearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/alibabacloud_opensearch/", "Clarifai": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/clarifai/", "scikit-learn": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sklearn/", "Tencent Cloud VectorDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tencentvectordb/", "DocArray HnswSearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/docarray_hnsw/", "MyScale": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/myscale/", "TileDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tiledb/", "Google Memorystore for Redis": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_memorystore_redis/", "Tigris": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tigris/", "China Mobile ECloud ElasticSearch VectorSearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/ecloud_vector_search/", "Bagel": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/bagel/", "Baidu Cloud ElasticSearch VectorSearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/baiducloud_vector_search/", "AwaDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/awadb/", "Supabase (Postgres)": 
"https://python.langchain.com/v0.2/docs/integrations/vectorstores/supabase/", "SurrealDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/surrealdb/", "OpenSearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/opensearch/", "Faiss (Async)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/faiss_async/", "BagelDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/bageldb/", "ManticoreSearch VectorStore": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/manticore_search/", "Azure AI Search": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azuresearch/", "USearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/usearch/", "PGVecto.rs": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pgvecto_rs/", "Marqo": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/marqo/", "DocArray InMemorySearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/docarray_in_memory/", "Postgres Embedding": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pgembedding/", "Intel's Visual Data Management System (VDMS)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vdms/", "Timescale Vector (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/timescalevector/", "Epsilla": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/epsilla/", "Amazon Document DB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/documentdb/", "SemaDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/semadb/", "AnalyticDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/analyticdb/", "Hologres": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/hologres/", "Baidu VectorDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/baiduvectordb/", "Vearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vearch/", "Meilisearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/meilisearch/", "VoyageAI Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/voyageai-reranker/", "RankLLM Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/rankllm-reranker/", "Volcengine Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/volcengine_rerank/", "OpenVINO Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/openvino_rerank/", "Cross Encoder Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/cross_encoder_reranker/", "Jina Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/jina_rerank/", "DashScope Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/dashscope_rerank/"}, "MultiVectorRetriever": {"How to retrieve using multiple vectors per document": "https://python.langchain.com/v0.2/docs/how_to/multi_vector/", "How to add scores to retriever results": "https://python.langchain.com/v0.2/docs/how_to/add_scores_retriever/", "Fleet AI Context": "https://python.langchain.com/v0.2/docs/integrations/retrievers/fleet_context/", "Docugami": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/docugami/"}, "SearchType": {"How to retrieve using multiple vectors per document": "https://python.langchain.com/v0.2/docs/how_to/multi_vector/", "Zep Open Source": 
"https://python.langchain.com/v0.2/docs/integrations/retrievers/zep_memorystore/", "Docugami": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/docugami/"}, "init_chat_model": {"How to init any model in one line": "https://python.langchain.com/v0.2/docs/how_to/chat_models_universal_init/", "How to select examples from a LangSmith dataset": "https://python.langchain.com/v0.2/docs/how_to/example_selectors_langsmith/"}, "SQLChatMessageHistory": {"How to add message history": "https://python.langchain.com/v0.2/docs/how_to/message_history/", "SQL (SQLAlchemy)": "https://python.langchain.com/v0.2/docs/integrations/memory/sql_chat_message_history/", "SQLite": "https://python.langchain.com/v0.2/docs/integrations/providers/sqlite/"}, "ConfigurableFieldSpec": {"How to add message history": "https://python.langchain.com/v0.2/docs/how_to/message_history/"}, "LlamaCpp": {"Run models locally": "https://python.langchain.com/v0.2/docs/how_to/local_llms/", "Llama2Chat": "https://python.langchain.com/v0.2/docs/integrations/chat/llama2_chat/", "Llama.cpp": "https://python.langchain.com/v0.2/docs/integrations/llms/llamacpp/"}, "CallbackManager": {"Run models locally": "https://python.langchain.com/v0.2/docs/how_to/local_llms/", "ChatLiteLLM": "https://python.langchain.com/v0.2/docs/integrations/chat/litellm/", "GPTRouter": "https://python.langchain.com/v0.2/docs/integrations/chat/gpt_router/", "ChatLiteLLMRouter": "https://python.langchain.com/v0.2/docs/integrations/chat/litellm_router/", "ZHIPU AI": "https://python.langchain.com/v0.2/docs/integrations/chat/zhipuai/", "Llama.cpp": "https://python.langchain.com/v0.2/docs/integrations/llms/llamacpp/", "Titan Takeoff": "https://python.langchain.com/v0.2/docs/integrations/llms/titan_takeoff/"}, "StreamingStdOutCallbackHandler": {"Run models locally": "https://python.langchain.com/v0.2/docs/how_to/local_llms/", "DeepInfra": "https://python.langchain.com/v0.2/docs/integrations/chat/deepinfra/", "ChatLiteLLM": "https://python.langchain.com/v0.2/docs/integrations/chat/litellm/", "ChatEverlyAI": "https://python.langchain.com/v0.2/docs/integrations/chat/everlyai/", "GPTRouter": "https://python.langchain.com/v0.2/docs/integrations/chat/gpt_router/", "ChatLiteLLMRouter": "https://python.langchain.com/v0.2/docs/integrations/chat/litellm_router/", "ZHIPU AI": "https://python.langchain.com/v0.2/docs/integrations/chat/zhipuai/", "Yuan2.0": "https://python.langchain.com/v0.2/docs/integrations/chat/yuan2/", "GPT4All": "https://python.langchain.com/v0.2/docs/integrations/providers/gpt4all/", "Arthur": "https://python.langchain.com/v0.2/docs/integrations/providers/arthur_tracking/", "TextGen": "https://python.langchain.com/v0.2/docs/integrations/llms/textgen/", "Llama.cpp": "https://python.langchain.com/v0.2/docs/integrations/llms/llamacpp/", "Titan Takeoff": "https://python.langchain.com/v0.2/docs/integrations/llms/titan_takeoff/", "Eden AI": "https://python.langchain.com/v0.2/docs/integrations/llms/edenai/", "C Transformers": "https://python.langchain.com/v0.2/docs/integrations/llms/ctransformers/", "ExLlamaV2": "https://python.langchain.com/v0.2/docs/integrations/llms/exllamav2/", "Huggingface Endpoints": "https://python.langchain.com/v0.2/docs/integrations/llms/huggingface_endpoint/", "Replicate": "https://python.langchain.com/v0.2/docs/integrations/llms/replicate/"}, "GPT4All": {"Run models locally": "https://python.langchain.com/v0.2/docs/how_to/local_llms/", "PromptLayer": "https://python.langchain.com/v0.2/docs/integrations/callbacks/promptlayer/", 
"GPT4All": "https://python.langchain.com/v0.2/docs/integrations/llms/gpt4all/"}, "Llamafile": {"Run models locally": "https://python.langchain.com/v0.2/docs/how_to/local_llms/", "Llamafile": "https://python.langchain.com/v0.2/docs/integrations/llms/llamafile/"}, "ConditionalPromptSelector": {"Run models locally": "https://python.langchain.com/v0.2/docs/how_to/local_llms/"}, "HubRunnable": {"How to configure runtime chain internals": "https://python.langchain.com/v0.2/docs/how_to/configure/"}, "ContextualCompressionRetriever": {"How to do retrieval with contextual compression": "https://python.langchain.com/v0.2/docs/how_to/contextual_compression/", "LLMLingua Document Compressor": "https://python.langchain.com/v0.2/docs/integrations/retrievers/llmlingua/", "Cohere reranker": "https://python.langchain.com/v0.2/docs/integrations/retrievers/cohere-reranker/", "FlashRank reranker": "https://python.langchain.com/v0.2/docs/integrations/retrievers/flashrank-reranker/", "LOTR (Merger Retriever)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/merger_retriever/", "UpTrain": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "RAGatouille": "https://python.langchain.com/v0.2/docs/integrations/providers/ragatouille/", "VoyageAI Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/voyageai-reranker/", "RankLLM Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/rankllm-reranker/", "Volcengine Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/volcengine_rerank/", "OpenVINO Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/openvino_rerank/", "Google Cloud Vertex AI Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/google_cloud_vertexai_rerank/", "Cross Encoder Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/cross_encoder_reranker/", "Jina Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/jina_rerank/", "DashScope Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/dashscope_rerank/"}, "LLMChainExtractor": {"How to do retrieval with contextual compression": "https://python.langchain.com/v0.2/docs/how_to/contextual_compression/"}, "LLMChainFilter": {"How to do retrieval with contextual compression": "https://python.langchain.com/v0.2/docs/how_to/contextual_compression/"}, "LLMListwiseRerank": {"How to do retrieval with contextual compression": "https://python.langchain.com/v0.2/docs/how_to/contextual_compression/"}, "EmbeddingsFilter": {"How to do retrieval with contextual compression": "https://python.langchain.com/v0.2/docs/how_to/contextual_compression/", "How to get a RAG application to add citations": "https://python.langchain.com/v0.2/docs/how_to/qa_citations/"}, "DocumentCompressorPipeline": {"How to do retrieval with contextual compression": "https://python.langchain.com/v0.2/docs/how_to/contextual_compression/"}, "EmbeddingsRedundantFilter": {"How to do retrieval with contextual compression": "https://python.langchain.com/v0.2/docs/how_to/contextual_compression/", "LOTR (Merger Retriever)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/merger_retriever/"}, "Comparator": {"How to construct filters for query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_constructing_filters/"}, "Comparison": {"How to construct filters for query analysis": 
"https://python.langchain.com/v0.2/docs/how_to/query_constructing_filters/"}, "Operation": {"How to construct filters for query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_constructing_filters/"}, "Operator": {"How to construct filters for query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_constructing_filters/"}, "StructuredQuery": {"How to construct filters for query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_constructing_filters/"}, "ChromaTranslator": {"How to construct filters for query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_constructing_filters/", "How to do \"self-querying\" retrieval": "https://python.langchain.com/v0.2/docs/how_to/self_query/"}, "ElasticsearchTranslator": {"How to construct filters for query analysis": "https://python.langchain.com/v0.2/docs/how_to/query_constructing_filters/"}, "WikipediaQueryRun": {"How to use built-in tools and toolkits": "https://python.langchain.com/v0.2/docs/how_to/tools_builtin/", "Wikipedia": "https://python.langchain.com/v0.2/docs/integrations/tools/wikipedia/"}, "WikipediaAPIWrapper": {"How to use built-in tools and toolkits": "https://python.langchain.com/v0.2/docs/how_to/tools_builtin/", "Wikipedia": "https://python.langchain.com/v0.2/docs/integrations/tools/wikipedia/", "Zep Open Source Memory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory/", "Zep Cloud Memory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory_cloud/"}, "CallbackManagerForRetrieverRun": {"How to create a custom Retriever": "https://python.langchain.com/v0.2/docs/how_to/custom_retriever/", "How to add scores to retriever results": "https://python.langchain.com/v0.2/docs/how_to/add_scores_retriever/"}, "BaseRetriever": {"How to create a custom Retriever": "https://python.langchain.com/v0.2/docs/how_to/custom_retriever/"}, "LLMGraphTransformer": {"How to construct knowledge graphs": "https://python.langchain.com/v0.2/docs/how_to/graph_constructing/"}, "RetryOutputParser": {"How to retry when a parsing error occurs": "https://python.langchain.com/v0.2/docs/how_to/output_parser_retry/"}, "TimeWeightedVectorStoreRetriever": {"How to use a time-weighted vector store retriever": "https://python.langchain.com/v0.2/docs/how_to/time_weighted_vectorstore/"}, "InMemoryDocstore": {"How to use a time-weighted vector store retriever": "https://python.langchain.com/v0.2/docs/how_to/time_weighted_vectorstore/", "Annoy": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/annoy/", "Faiss": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/faiss/"}, "mock_now": {"How to use a time-weighted vector store retriever": "https://python.langchain.com/v0.2/docs/how_to/time_weighted_vectorstore/"}, "RunnableGenerator": {"How to create a custom Output Parser": "https://python.langchain.com/v0.2/docs/how_to/output_parser_custom/"}, "OutputParserException": {"How to create a custom Output Parser": "https://python.langchain.com/v0.2/docs/how_to/output_parser_custom/"}, "BaseOutputParser": {"How to create a custom Output Parser": "https://python.langchain.com/v0.2/docs/how_to/output_parser_custom/", "How to use the MultiQueryRetriever": "https://python.langchain.com/v0.2/docs/how_to/MultiQueryRetriever/"}, "BaseGenerationOutputParser": {"How to create a custom Output Parser": "https://python.langchain.com/v0.2/docs/how_to/output_parser_custom/"}, "Generation": {"How to create a custom Output Parser": 
"https://python.langchain.com/v0.2/docs/how_to/output_parser_custom/"}, "DirectoryLoader": {"How to load documents from a directory": "https://python.langchain.com/v0.2/docs/how_to/document_loader_directory/", "AzureAISearchRetriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/azure_ai_search/", "Apache Doris": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/apache_doris/", "StarRocks": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/starrocks/"}, "PythonLoader": {"How to load documents from a directory": "https://python.langchain.com/v0.2/docs/how_to/document_loader_directory/"}, "LanceDB": {"How to create and query vector stores": "https://python.langchain.com/v0.2/docs/how_to/vectorstores/", "LanceDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/lancedb/"}, "SpacyTextSplitter": {"How to split text by tokens ": "https://python.langchain.com/v0.2/docs/how_to/split_by_token/", "spaCy": "https://python.langchain.com/v0.2/docs/integrations/providers/spacy/", "Atlas": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/atlas/"}, "SentenceTransformersTokenTextSplitter": {"How to split text by tokens ": "https://python.langchain.com/v0.2/docs/how_to/split_by_token/"}, "NLTKTextSplitter": {"How to split text by tokens ": "https://python.langchain.com/v0.2/docs/how_to/split_by_token/"}, "KonlpyTextSplitter": {"How to split text by tokens ": "https://python.langchain.com/v0.2/docs/how_to/split_by_token/"}, "WikipediaRetriever": {"How to get a RAG application to add citations": "https://python.langchain.com/v0.2/docs/how_to/qa_citations/", "WikipediaRetriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/wikipedia/", "Wikipedia": "https://python.langchain.com/v0.2/docs/integrations/providers/wikipedia/"}, "UnstructuredHTMLLoader": {"How to load HTML": "https://python.langchain.com/v0.2/docs/how_to/document_loader_html/", "Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/"}, "MultiQueryRetriever": {"How to use the MultiQueryRetriever": "https://python.langchain.com/v0.2/docs/how_to/MultiQueryRetriever/", "UpTrain": "https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/", "Vectara": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vectara/"}, "GraphCypherQAChain": {"How to best prompt for Graph-RAG": "https://python.langchain.com/v0.2/docs/how_to/graph_prompting/", "Neo4j": "https://python.langchain.com/v0.2/docs/integrations/graphs/neo4j_cypher/", "Memgraph": "https://python.langchain.com/v0.2/docs/integrations/graphs/memgraph/", "Diffbot": "https://python.langchain.com/v0.2/docs/integrations/graphs/diffbot/", "Apache AGE": "https://python.langchain.com/v0.2/docs/integrations/graphs/apache_age/", "Build a Question Answering application over a Graph Database": "https://python.langchain.com/v0.2/docs/tutorials/graph/"}, "Neo4jVector": {"How to best prompt for Graph-RAG": "https://python.langchain.com/v0.2/docs/how_to/graph_prompting/", "Neo4j": "https://python.langchain.com/v0.2/docs/integrations/providers/neo4j/", "Neo4j Vector Index": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/neo4jvector/"}, "ParentDocumentRetriever": {"How to use the Parent Document Retriever": "https://python.langchain.com/v0.2/docs/how_to/parent_document_retriever/"}, "InMemoryStore": {"How to use the Parent Document Retriever": "https://python.langchain.com/v0.2/docs/how_to/parent_document_retriever/", "How to add 
scores to retriever results": "https://python.langchain.com/v0.2/docs/how_to/add_scores_retriever/", "Fleet AI Context": "https://python.langchain.com/v0.2/docs/integrations/retrievers/fleet_context/", "Docugami": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/docugami/"}, "YamlOutputParser": {"How to parse YAML output": "https://python.langchain.com/v0.2/docs/how_to/output_parser_yaml/"}, "PipelinePromptTemplate": {"How to compose prompts together": "https://python.langchain.com/v0.2/docs/how_to/prompts_composition/"}, "CacheBackedEmbeddings": {"Caching": "https://python.langchain.com/v0.2/docs/how_to/caching_embeddings/"}, "LocalFileStore": {"Caching": "https://python.langchain.com/v0.2/docs/how_to/caching_embeddings/", "LocalFileStore": "https://python.langchain.com/v0.2/docs/integrations/stores/file_system/"}, "Ollama": {"How to add ad-hoc tool calling capability to LLMs and Chat Models": "https://python.langchain.com/v0.2/docs/how_to/tools_prompting/", "ApertureDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/aperturedb/"}, "render_text_description": {"How to add ad-hoc tool calling capability to LLMs and Chat Models": "https://python.langchain.com/v0.2/docs/how_to/tools_prompting/", "MLX": "https://python.langchain.com/v0.2/docs/integrations/chat/mlx/"}, "RunnableSerializable": {"LangChain Expression Language Cheatsheet": "https://python.langchain.com/v0.2/docs/how_to/lcel_cheatsheet/"}, "Run": {"LangChain Expression Language Cheatsheet": "https://python.langchain.com/v0.2/docs/how_to/lcel_cheatsheet/"}, "MarkdownHeaderTextSplitter": {"How to split Markdown by Headers": "https://python.langchain.com/v0.2/docs/how_to/markdown_header_metadata_splitter/"}, "HTMLHeaderTextSplitter": {"How to split by HTML header ": "https://python.langchain.com/v0.2/docs/how_to/HTML_header_metadata_splitter/"}, "EnsembleRetriever": {"How to combine results from multiple retrievers": "https://python.langchain.com/v0.2/docs/how_to/ensemble_retriever/"}, "BM25Retriever": {"How to combine results from multiple retrievers": "https://python.langchain.com/v0.2/docs/how_to/ensemble_retriever/", "BM25": "https://python.langchain.com/v0.2/docs/integrations/retrievers/bm25/", "Maritalk": "https://python.langchain.com/v0.2/docs/integrations/chat/maritalk/"}, "ChatMistralAI": {"Response metadata": "https://python.langchain.com/v0.2/docs/how_to/response_metadata/", "ChatMistralAI": "https://python.langchain.com/v0.2/docs/integrations/chat/mistralai/", "MistralAI": "https://python.langchain.com/v0.2/docs/integrations/providers/mistralai/", "Build an Extraction Chain": "https://python.langchain.com/v0.2/docs/tutorials/extraction/"}, "ChatGroq": {"Response metadata": "https://python.langchain.com/v0.2/docs/how_to/response_metadata/", "ChatGroq": "https://python.langchain.com/v0.2/docs/integrations/chat/groq/"}, "set_verbose": {"How to debug your LLM apps": "https://python.langchain.com/v0.2/docs/how_to/debugging/", "OpaquePrompts": "https://python.langchain.com/v0.2/docs/integrations/llms/opaqueprompts/"}, "set_debug": {"How to debug your LLM apps": "https://python.langchain.com/v0.2/docs/how_to/debugging/", "Bittensor": "https://python.langchain.com/v0.2/docs/integrations/llms/bittensor/", "TextGen": "https://python.langchain.com/v0.2/docs/integrations/llms/textgen/", "OpaquePrompts": "https://python.langchain.com/v0.2/docs/integrations/llms/opaqueprompts/"}, "MaxMarginalRelevanceExampleSelector": {"How to select examples by maximal marginal relevance (MMR)": 
"https://python.langchain.com/v0.2/docs/how_to/example_selectors_mmr/"}, "AttributeInfo": {"How to do \"self-querying\" retrieval": "https://python.langchain.com/v0.2/docs/how_to/self_query/", "How to add scores to retriever results": "https://python.langchain.com/v0.2/docs/how_to/add_scores_retriever/", "Milvus": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/milvus_self_query/", "PGVector (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/pgvector_self_query/", "Weaviate": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/weaviate_self_query/", "Vectara self-querying ": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/vectara_self_query/", "SAP HANA Cloud Vector Engine": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/hanavector_self_query/", "DashVector": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/dashvector/", "Databricks Vector Search": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/databricks_vector_search/", "DingoDB": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/dingo/", "OpenSearch": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/opensearch_self_query/", "Elasticsearch": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/elasticsearch_self_query/", "Chroma": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/chroma_self_query/", "Tencent Cloud VectorDB": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/tencentvectordb/", "Timescale Vector (Postgres) ": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/timescalevector_self_query/", "Astra DB (Cassandra)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/astradb/", "Pinecone": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/pinecone/", "Supabase (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/supabase_self_query/", "Redis": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/redis_self_query/", "MyScale": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/myscale_self_query/", "Deep Lake": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/activeloop_deeplake_self_query/", "MongoDB Atlas": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/mongodb_atlas/", "Qdrant": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/qdrant_self_query/", "Timescale Vector (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/timescalevector/", "Docugami": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/docugami/", "self-query-qdrant": "https://python.langchain.com/v0.2/docs/templates/self-query-qdrant/"}, "SelfQueryRetriever": {"How to do \"self-querying\" retrieval": "https://python.langchain.com/v0.2/docs/how_to/self_query/", "How to add scores to retriever results": "https://python.langchain.com/v0.2/docs/how_to/add_scores_retriever/", "Milvus": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/milvus_self_query/", "PGVector (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/pgvector_self_query/", "Weaviate": 
"https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/weaviate_self_query/", "Vectara self-querying ": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/vectara_self_query/", "SAP HANA Cloud Vector Engine": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/hanavector_self_query/", "DashVector": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/dashvector/", "Databricks Vector Search": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/databricks_vector_search/", "DingoDB": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/dingo/", "OpenSearch": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/opensearch_self_query/", "Elasticsearch": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/elasticsearch_self_query/", "Chroma": "https://python.langchain.com/v0.2/docs/integrations/providers/chroma/", "Tencent Cloud VectorDB": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/tencentvectordb/", "Timescale Vector (Postgres) ": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/timescalevector_self_query/", "Astra DB (Cassandra)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/astradb/", "Pinecone": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/pinecone/", "Supabase (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/supabase_self_query/", "Redis": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/redis_self_query/", "MyScale": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/myscale_self_query/", "Deep Lake": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/activeloop_deeplake_self_query/", "MongoDB Atlas": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/mongodb_atlas/", "Qdrant": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/qdrant_self_query/", "Astra DB": "https://python.langchain.com/v0.2/docs/integrations/providers/astradb/", "Timescale Vector (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/timescalevector/", "Docugami": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/docugami/"}, "StructuredQueryOutputParser": {"How to do \"self-querying\" retrieval": "https://python.langchain.com/v0.2/docs/how_to/self_query/", "SAP HANA Cloud Vector Engine": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/hanavector_self_query/"}, "get_query_constructor_prompt": {"How to do \"self-querying\" retrieval": "https://python.langchain.com/v0.2/docs/how_to/self_query/", "SAP HANA Cloud Vector Engine": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/hanavector_self_query/"}, "add": {"How to select examples from a LangSmith dataset": "https://python.langchain.com/v0.2/docs/how_to/example_selectors_langsmith/"}, "cos": {"How to select examples from a LangSmith dataset": "https://python.langchain.com/v0.2/docs/how_to/example_selectors_langsmith/"}, "divide": {"How to select examples from a LangSmith dataset": "https://python.langchain.com/v0.2/docs/how_to/example_selectors_langsmith/"}, "log": {"How to select examples from a LangSmith dataset": "https://python.langchain.com/v0.2/docs/how_to/example_selectors_langsmith/"}, "multiply": {"How to select 
examples from a LangSmith dataset": "https://python.langchain.com/v0.2/docs/how_to/example_selectors_langsmith/"}, "negate": {"How to select examples from a LangSmith dataset": "https://python.langchain.com/v0.2/docs/how_to/example_selectors_langsmith/"}, "pi": {"How to select examples from a LangSmith dataset": "https://python.langchain.com/v0.2/docs/how_to/example_selectors_langsmith/"}, "power": {"How to select examples from a LangSmith dataset": "https://python.langchain.com/v0.2/docs/how_to/example_selectors_langsmith/"}, "sin": {"How to select examples from a LangSmith dataset": "https://python.langchain.com/v0.2/docs/how_to/example_selectors_langsmith/"}, "subtract": {"How to select examples from a LangSmith dataset": "https://python.langchain.com/v0.2/docs/how_to/example_selectors_langsmith/"}, "adispatch_custom_event": {"How to dispatch custom callback events": "https://python.langchain.com/v0.2/docs/how_to/callbacks_custom_events/"}, "dispatch_custom_event": {"How to dispatch custom callback events": "https://python.langchain.com/v0.2/docs/how_to/callbacks_custom_events/"}, "Cassandra": {"Hybrid Search": "https://python.langchain.com/v0.2/docs/how_to/hybrid/", "Cassandra": "https://python.langchain.com/v0.2/docs/integrations/providers/cassandra/", "Apache Cassandra": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/cassandra/"}, "HTMLSectionSplitter": {"How to split by HTML sections": "https://python.langchain.com/v0.2/docs/how_to/HTML_section_aware_splitter/"}, "JSONLoader": {"How to load JSON": "https://python.langchain.com/v0.2/docs/how_to/document_loader_json/", "Timescale Vector (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/timescalevector/", "JSONLoader": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/json/"}, "UpstashRedisCache": {"Model caches": "https://python.langchain.com/v0.2/docs/integrations/llm_caching/", "Upstash Vector": "https://python.langchain.com/v0.2/docs/integrations/providers/upstash/"}, "RedisCache": {"Model caches": "https://python.langchain.com/v0.2/docs/integrations/llm_caching/", "Redis": "https://python.langchain.com/v0.2/docs/integrations/providers/redis/"}, "RedisSemanticCache": {"Model caches": "https://python.langchain.com/v0.2/docs/integrations/llm_caching/", "Redis": "https://python.langchain.com/v0.2/docs/integrations/providers/redis/"}, "GPTCache": {"Model caches": "https://python.langchain.com/v0.2/docs/integrations/llm_caching/"}, "MomentoCache": {"Model caches": "https://python.langchain.com/v0.2/docs/integrations/llm_caching/", "Momento": "https://python.langchain.com/v0.2/docs/integrations/providers/momento/"}, "SQLAlchemyCache": {"Model caches": "https://python.langchain.com/v0.2/docs/integrations/llm_caching/"}, "CassandraCache": {"Model caches": "https://python.langchain.com/v0.2/docs/integrations/llm_caching/", "Cassandra": "https://python.langchain.com/v0.2/docs/integrations/providers/cassandra/"}, "CassandraSemanticCache": {"Model caches": "https://python.langchain.com/v0.2/docs/integrations/llm_caching/", "Cassandra": "https://python.langchain.com/v0.2/docs/integrations/providers/cassandra/"}, "AzureCosmosDBSemanticCache": {"Model caches": "https://python.langchain.com/v0.2/docs/integrations/llm_caching/"}, "CosmosDBSimilarityType": {"Model caches": "https://python.langchain.com/v0.2/docs/integrations/llm_caching/", "Azure Cosmos DB Mongo vCore": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azure_cosmos_db/"}, "CosmosDBVectorSearchType": 
{"Model caches": "https://python.langchain.com/v0.2/docs/integrations/llm_caching/", "Azure Cosmos DB Mongo vCore": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azure_cosmos_db/"}, "load_summarize_chain": {"Model caches": "https://python.langchain.com/v0.2/docs/integrations/llm_caching/", "Infino": "https://python.langchain.com/v0.2/docs/integrations/callbacks/infino/", "LarkSuite (FeiShu)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/larksuite/"}, "OpenSearchSemanticCache": {"Model caches": "https://python.langchain.com/v0.2/docs/integrations/llm_caching/"}, "SingleStoreDBSemanticCache": {"Model caches": "https://python.langchain.com/v0.2/docs/integrations/llm_caching/"}, "map_ai_messages": {"WeChat": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/wechat/", "Facebook Messenger": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/facebook/", "GMail": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/gmail/", "Slack": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/slack/", "WhatsApp": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/whatsapp/", "iMessage": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/imessage/", "Telegram": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/telegram/", "Discord": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/discord/"}, "merge_chat_runs": {"WeChat": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/wechat/", "Facebook Messenger": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/facebook/", "Slack": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/slack/", "WhatsApp": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/whatsapp/", "iMessage": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/imessage/", "Telegram": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/telegram/", "Discord": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/discord/"}, "ChatSession": {"WeChat": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/wechat/", "Slack": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/slack/", "WhatsApp": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/whatsapp/", "iMessage": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/imessage/", "Telegram": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/telegram/", "Discord": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/discord/"}, "FolderFacebookMessengerChatLoader": {"Facebook Messenger": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/facebook/", "Facebook - Meta": "https://python.langchain.com/v0.2/docs/integrations/providers/facebook/"}, "SingleFileFacebookMessengerChatLoader": {"Facebook Messenger": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/facebook/", "Facebook - Meta": "https://python.langchain.com/v0.2/docs/integrations/providers/facebook/"}, "convert_messages_for_finetuning": {"Facebook Messenger": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/facebook/", "LangSmith LLM Runs": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/langsmith_llm_runs/", "LangSmith Chat Datasets": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/langsmith_dataset/", "iMessage": 
"https://python.langchain.com/v0.2/docs/integrations/chat_loaders/imessage/"}, "convert_message_to_dict": {"Twitter (via Apify)": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/twitter/"}, "convert_pydantic_to_openai_function": {"LangSmith LLM Runs": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/langsmith_llm_runs/"}, "PydanticOutputFunctionsParser": {"LangSmith LLM Runs": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/langsmith_llm_runs/"}, "LangSmithRunChatLoader": {"LangSmith LLM Runs": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/langsmith_llm_runs/"}, "GMailLoader": {"GMail": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/gmail/"}, "SlackChatLoader": {"Slack": "https://python.langchain.com/v0.2/docs/integrations/providers/slack/"}, "WhatsAppChatLoader": {"WhatsApp": "https://python.langchain.com/v0.2/docs/integrations/providers/whatsapp/", "Facebook - Meta": "https://python.langchain.com/v0.2/docs/integrations/providers/facebook/", "WhatsApp Chat": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/whatsapp_chat/"}, "LangSmithDatasetChatLoader": {"LangSmith Chat Datasets": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/langsmith_dataset/"}, "IMessageChatLoader": {"iMessage": "https://python.langchain.com/v0.2/docs/integrations/chat_loaders/imessage/"}, "TelegramChatLoader": {"Telegram": "https://python.langchain.com/v0.2/docs/integrations/providers/telegram/"}, "BookendEmbeddings": {"Bookend AI": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/bookend/"}, "SolarEmbeddings": {"Solar": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/solar/"}, "HuggingFaceBgeEmbeddings": {"BGE on Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/bge_huggingface/", "Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/platforms/huggingface/"}, "IpexLLMBgeEmbeddings": {"Local BGE Embeddings with IPEX-LLM on Intel CPU": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/ipex_llm/", "Local BGE Embeddings with IPEX-LLM on Intel GPU": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/ipex_llm_gpu/"}, "QuantizedBiEncoderEmbeddings": {"Embedding Documents using Optimized and Quantized Embedders": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/optimum_intel/", "Intel": "https://python.langchain.com/v0.2/docs/integrations/providers/intel/"}, "XinferenceEmbeddings": {"Xorbits inference (Xinference)": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/xinference/"}, "LLMRailsEmbeddings": {"LLMRails": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/llm_rails/"}, "AscendEmbeddings": {"# Related": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/ascend/", "Ascend": "https://python.langchain.com/v0.2/docs/integrations/providers/ascend/"}, "DeepInfraEmbeddings": {"DeepInfra": "https://python.langchain.com/v0.2/docs/integrations/providers/deepinfra/"}, "HuggingFaceInferenceAPIEmbeddings": {"Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/huggingfacehub/"}, "GPT4AllEmbeddings": {"GPT4All": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/gpt4all/", "ManticoreSearch VectorStore": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/manticore_search/"}, "MosaicMLInstructorEmbeddings": {"MosaicML": 
"https://python.langchain.com/v0.2/docs/integrations/text_embedding/mosaicml/"}, "QuantizedBgeEmbeddings": {"Intel\u00ae Extension for Transformers Quantized Text Embeddings": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/itrex/", "Intel": "https://python.langchain.com/v0.2/docs/integrations/providers/intel/"}, "BedrockEmbeddings": {"Bedrock": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/bedrock/", "AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "Amazon MemoryDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/memorydb/"}, "GigaChatEmbeddings": {"GigaChat": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/gigachat/", "Salute Devices": "https://python.langchain.com/v0.2/docs/integrations/providers/salute_devices/"}, "OCIGenAIEmbeddings": {"Oracle Cloud Infrastructure Generative AI": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/oci_generative_ai/", "Oracle Cloud Infrastructure (OCI)": "https://python.langchain.com/v0.2/docs/integrations/providers/oci/"}, "OVHCloudEmbeddings": {"OVHcloud": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/ovhcloud/"}, "FastEmbedEmbeddings": {"FastEmbed by Qdrant": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/fastembed/"}, "LlamaCppEmbeddings": {"Llama.cpp": "https://python.langchain.com/v0.2/docs/integrations/providers/llamacpp/"}, "NLPCloudEmbeddings": {"NLP Cloud": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/nlp_cloud/", "NLPCloud": "https://python.langchain.com/v0.2/docs/integrations/providers/nlpcloud/"}, "TextEmbedEmbeddings": {"TextEmbed - Embedding Inference Server": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/textembed/"}, "LaserEmbeddings": {"LASER Language-Agnostic SEntence Representations Embeddings by Meta AI": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/laser/", "Facebook - Meta": "https://python.langchain.com/v0.2/docs/integrations/providers/facebook/"}, "OpenCLIPEmbeddings": {"OpenClip": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/open_clip/", "LanceDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/lancedb/", "SingleStoreDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/singlestoredb/"}, "TitanTakeoffEmbed": {"Titan Takeoff": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/titan_takeoff/"}, "MistralAIEmbeddings": {"MistralAIEmbeddings": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/mistralai/", "MistralAI": "https://python.langchain.com/v0.2/docs/integrations/providers/mistralai/"}, "SpacyEmbeddings": {"SpaCy": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/spacy_embedding/", "NanoPQ (Product Quantization)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/nanopq/", "spaCy": "https://python.langchain.com/v0.2/docs/integrations/providers/spacy/"}, "DatabricksEmbeddings": {"Databricks": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/databricks/"}, "BaichuanTextEmbeddings": {"Baichuan Text Embeddings": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/baichuan/", "Baichuan": "https://python.langchain.com/v0.2/docs/integrations/providers/baichuan/"}, "TogetherEmbeddings": {"TogetherEmbeddings": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/together/"}, "HuggingFaceInstructEmbeddings": {"Instruct 
Embeddings on Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/instruct_embeddings/", "Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/platforms/huggingface/"}, "OracleEmbeddings": {"Oracle AI Vector Search: Generate Embeddings": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/oracleai/", "OracleAI Vector Search": "https://python.langchain.com/v0.2/docs/integrations/providers/oracleai/"}, "QianfanEmbeddingsEndpoint": {"Baidu Qianfan": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/baidu_qianfan_endpoint/", "ERNIE": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/ernie/", "Baidu": "https://python.langchain.com/v0.2/docs/integrations/providers/baidu/", "Baidu Cloud ElasticSearch VectorSearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/baiducloud_vector_search/"}, "EdenAiEmbeddings": {"EDEN AI": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/edenai/", "Eden AI": "https://python.langchain.com/v0.2/docs/integrations/providers/edenai/"}, "JohnSnowLabsEmbeddings": {"John Snow Labs": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/johnsnowlabs_embedding/"}, "ErnieEmbeddings": {"ERNIE": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/ernie/"}, "ClarifaiEmbeddings": {"Clarifai": "https://python.langchain.com/v0.2/docs/integrations/providers/clarifai/"}, "AzureOpenAIEmbeddings": {"AzureOpenAIEmbeddings": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/azureopenai/", "AzureAISearchRetriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/azure_ai_search/", "Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "Azure Cosmos DB No SQL": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azure_cosmos_db_no_sql/", "Azure AI Search": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azuresearch/"}, "InfinityEmbeddings": {"Infinity": "https://python.langchain.com/v0.2/docs/integrations/providers/infinity/"}, "InfinityEmbeddingsLocal": {"Infinity": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/infinity/"}, "AwaEmbeddings": {"AwaDB": "https://python.langchain.com/v0.2/docs/integrations/providers/awadb/"}, "VolcanoEmbeddings": {"Volc Engine": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/volcengine/"}, "MiniMaxEmbeddings": {"MiniMax": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/minimax/", "Minimax": "https://python.langchain.com/v0.2/docs/integrations/providers/minimax/"}, "FakeEmbeddings": {"Fake Embeddings": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/fake/", "DocArray": "https://python.langchain.com/v0.2/docs/integrations/retrievers/docarray_retriever/", "Relyt": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/relyt/", "Tair": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tair/", "Tencent Cloud VectorDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tencentvectordb/", "Google Memorystore for Redis": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_memorystore_redis/", "PGVecto.rs": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pgvecto_rs/", "Baidu VectorDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/baiduvectordb/"}, "ClovaEmbeddings": {"Clova Embeddings": 
"https://python.langchain.com/v0.2/docs/integrations/text_embedding/clova/"}, "NeMoEmbeddings": {"NVIDIA NeMo embeddings": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/nemo/"}, "SparkLLMTextEmbeddings": {"SparkLLM Text Embeddings": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/sparkllm/", "iFlytek": "https://python.langchain.com/v0.2/docs/integrations/providers/iflytek/"}, "PremAIEmbeddings": {"PremAI": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/premai/"}, "KNNRetriever": {"Voyage AI": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/voyageai/", "kNN": "https://python.langchain.com/v0.2/docs/integrations/retrievers/knn/"}, "SelfHostedEmbeddings": {"Self Hosted": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/self-hosted/"}, "SelfHostedHuggingFaceEmbeddings": {"Self Hosted": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/self-hosted/"}, "SelfHostedHuggingFaceInstructEmbeddings": {"Self Hosted": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/self-hosted/"}, "AnyscaleEmbeddings": {"Anyscale": "https://python.langchain.com/v0.2/docs/integrations/providers/anyscale/"}, "EmbaasEmbeddings": {"Embaas": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/embaas/"}, "YandexGPTEmbeddings": {"YandexGPT": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/yandex/"}, "JinaEmbeddings": {"Jina": "https://python.langchain.com/v0.2/docs/integrations/providers/jina/", "Jina Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/jina_rerank/"}, "AlephAlphaAsymmetricSemanticEmbedding": {"Aleph Alpha": "https://python.langchain.com/v0.2/docs/integrations/providers/aleph_alpha/"}, "AlephAlphaSymmetricSemanticEmbedding": {"Aleph Alpha": "https://python.langchain.com/v0.2/docs/integrations/providers/aleph_alpha/"}, "CloudflareWorkersAIEmbeddings": {"Cloudflare Workers AI": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/cloudflare_workersai/", "Cloudflare": "https://python.langchain.com/v0.2/docs/integrations/providers/cloudflare/"}, "DashScopeEmbeddings": {"DashScope": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/dashscope/", "DashVector": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/dashvector/", "DashScope Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/dashscope_rerank/"}, "TensorflowHubEmbeddings": {"TensorFlow Hub": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/tensorflowhub/"}, "LlamafileEmbeddings": {"llamafile": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/llamafile/"}, "GradientEmbeddings": {"Gradient": "https://python.langchain.com/v0.2/docs/integrations/providers/gradient/"}, "ModelScopeEmbeddings": {"ModelScope": "https://python.langchain.com/v0.2/docs/integrations/providers/modelscope/"}, "SagemakerEndpointEmbeddings": {"SageMaker": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/sagemaker-endpoint/", "AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/"}, "EmbeddingsContentHandler": {"SageMaker": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/sagemaker-endpoint/"}, "DocArrayInMemorySearch": {"UpstageEmbeddings": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/upstage/", "DocArray InMemorySearch": 
"https://python.langchain.com/v0.2/docs/integrations/vectorstores/docarray_in_memory/"}, "SambaStudioEmbeddings": {"SambaNova": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/sambanova/"}, "OpenVINOEmbeddings": {"OpenVINO": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/openvino/", "OpenVINO Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/openvino_rerank/"}, "OpenVINOBgeEmbeddings": {"OpenVINO": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/openvino/"}, "LocalAIEmbeddings": {"LocalAI": "https://python.langchain.com/v0.2/docs/integrations/text_embedding/localai/"}, "AzureAISearchRetriever": {"AzureAISearchRetriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/azure_ai_search/", "Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/"}, "AzureSearch": {"AzureAISearchRetriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/azure_ai_search/", "Azure AI Search": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azuresearch/"}, "RePhraseQueryRetriever": {"RePhraseQuery": "https://python.langchain.com/v0.2/docs/integrations/retrievers/re_phrase/"}, "YouSearchAPIWrapper": {"You.com": "https://python.langchain.com/v0.2/docs/integrations/retrievers/you-retriever/", "You.com Search": "https://python.langchain.com/v0.2/docs/integrations/tools/you/"}, "YouRetriever": {"You.com": "https://python.langchain.com/v0.2/docs/integrations/retrievers/you-retriever/"}, "Kinetica": {"Kinetica Vectorstore based Retriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/kinetica/", "Kinetica": "https://python.langchain.com/v0.2/docs/integrations/providers/kinetica/", "Kinetica Vectorstore API": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/kinetica/"}, "KineticaSettings": {"Kinetica Vectorstore based Retriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/kinetica/", "Kinetica Vectorstore API": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/kinetica/", "Kinetica": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/kinetica/"}, "Jaguar": {"JaguarDB Vector Database": "https://python.langchain.com/v0.2/docs/integrations/retrievers/jaguar/", "Jaguar": "https://python.langchain.com/v0.2/docs/integrations/providers/jaguar/", "Jaguar Vector Database": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/jaguar/"}, "BaseStore": {"Fleet AI Context": "https://python.langchain.com/v0.2/docs/integrations/retrievers/fleet_context/"}, "VectorStore": {"Fleet AI Context": "https://python.langchain.com/v0.2/docs/integrations/retrievers/fleet_context/"}, "AskNewsRetriever": {"AskNews": "https://python.langchain.com/v0.2/docs/integrations/retrievers/asknews/"}, "LLMLinguaCompressor": {"LLMLingua Document Compressor": "https://python.langchain.com/v0.2/docs/integrations/retrievers/llmlingua/"}, "ElasticSearchBM25Retriever": {"ElasticSearch BM25": "https://python.langchain.com/v0.2/docs/integrations/retrievers/elastic_search_bm25/"}, "OutlineRetriever": {"Outline": "https://python.langchain.com/v0.2/docs/integrations/providers/outline/"}, "ZepMemory": {"Zep Open Source": "https://python.langchain.com/v0.2/docs/integrations/retrievers/zep_memorystore/", "Zep Open Source Memory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory/"}, "SearchScope": {"Zep Open Source": 
"https://python.langchain.com/v0.2/docs/integrations/retrievers/zep_memorystore/"}, "ZepRetriever": {"Zep Open Source": "https://python.langchain.com/v0.2/docs/integrations/retrievers/zep_memorystore/", "Zep Open Source Memory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory/"}, "VespaRetriever": {"Vespa": "https://python.langchain.com/v0.2/docs/integrations/providers/vespa/"}, "AmazonKendraRetriever": {"Amazon Kendra": "https://python.langchain.com/v0.2/docs/integrations/retrievers/amazon_kendra_retriever/", "AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/"}, "AmazonKnowledgeBasesRetriever": {"Bedrock (Knowledge Bases) Retriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/bedrock/", "AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/"}, "Bedrock": {"Bedrock (Knowledge Bases) Retriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/bedrock/"}, "CohereEmbeddings": {"Cohere reranker": "https://python.langchain.com/v0.2/docs/integrations/retrievers/cohere-reranker/"}, "Cohere": {"Cohere reranker": "https://python.langchain.com/v0.2/docs/integrations/retrievers/cohere-reranker/", "self-query-qdrant": "https://python.langchain.com/v0.2/docs/templates/self-query-qdrant/"}, "ZepCloudMemory": {"Zep Cloud": "https://python.langchain.com/v0.2/docs/integrations/retrievers/zep_cloud_memorystore/", "ZepCloudChatMessageHistory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_cloud_chat_message_history/", "Zep Cloud Memory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory_cloud/"}, "ZepCloudRetriever": {"Zep Cloud": "https://python.langchain.com/v0.2/docs/integrations/retrievers/zep_cloud_memorystore/", "Zep Cloud Memory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory_cloud/", "Zep": "https://python.langchain.com/v0.2/docs/integrations/providers/zep/"}, "NeuralDBRetriever": {"**NeuralDB**": "https://python.langchain.com/v0.2/docs/integrations/retrievers/thirdai_neuraldb/"}, "SingleStoreDB": {"SingleStoreDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/singlestoredb/"}, "MetalRetriever": {"Metal": "https://python.langchain.com/v0.2/docs/integrations/providers/metal/"}, "BreebsRetriever": {"BREEBS (Open Knowledge)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/breebs/"}, "NanoPQRetriever": {"NanoPQ (Product Quantization)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/nanopq/"}, "ChatGPTPluginRetriever": {"ChatGPT plugin": "https://python.langchain.com/v0.2/docs/integrations/retrievers/chatgpt-plugin/", "OpenAI": "https://python.langchain.com/v0.2/docs/integrations/platforms/openai/"}, "KayAiRetriever": {"SEC filing": "https://python.langchain.com/v0.2/docs/integrations/retrievers/sec_filings/", "Kay.ai": "https://python.langchain.com/v0.2/docs/integrations/retrievers/kay/"}, "DriaRetriever": {"Dria": "https://python.langchain.com/v0.2/docs/integrations/retrievers/dria_index/"}, "DocArrayRetriever": {"DocArray": "https://python.langchain.com/v0.2/docs/integrations/retrievers/docarray_retriever/"}, "SVMRetriever": {"SVM": "https://python.langchain.com/v0.2/docs/integrations/retrievers/svm/", "scikit-learn": "https://python.langchain.com/v0.2/docs/integrations/providers/sklearn/"}, "TavilySearchAPIRetriever": {"TavilySearchAPIRetriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/tavily/"}, "PineconeHybridSearchRetriever": {"Pinecone Hybrid Search": 
"https://python.langchain.com/v0.2/docs/integrations/retrievers/pinecone_hybrid_search/", "Pinecone": "https://python.langchain.com/v0.2/docs/integrations/providers/pinecone/"}, "DeepLake": {"Activeloop Deep Memory": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "Deep Lake": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/activeloop_deeplake_self_query/", "Activeloop Deep Lake": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/activeloop_deeplake/"}, "AsyncHtmlLoader": {"Activeloop Deep Memory": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "HTML to text": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/html2text/", "Markdownify": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/markdownify/", "AsyncHtml": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/async_html/"}, "Html2TextTransformer": {"Activeloop Deep Memory": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "HTML to text": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/html2text/", "Async Chromium": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/async_chromium/"}, "create_structured_output_chain": {"Activeloop Deep Memory": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/"}, "HumanMessagePromptTemplate": {"Activeloop Deep Memory": "https://python.langchain.com/v0.2/docs/integrations/retrievers/activeloop/", "JinaChat": "https://python.langchain.com/v0.2/docs/integrations/chat/jinachat/", "Llama2Chat": "https://python.langchain.com/v0.2/docs/integrations/chat/llama2_chat/", "vLLM Chat": "https://python.langchain.com/v0.2/docs/integrations/chat/vllm/", "Context": "https://python.langchain.com/v0.2/docs/integrations/callbacks/context/", "Yellowbrick": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/yellowbrick/", "Figma": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/figma/"}, "PubMedRetriever": {"PubMed": "https://python.langchain.com/v0.2/docs/integrations/providers/pubmed/"}, "WeaviateHybridSearchRetriever": {"Weaviate Hybrid Search": "https://python.langchain.com/v0.2/docs/integrations/retrievers/weaviate-hybrid/"}, "EmbedchainRetriever": {"Embedchain": "https://python.langchain.com/v0.2/docs/integrations/retrievers/embedchain/"}, "ArxivRetriever": {"ArxivRetriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/arxiv/", "Arxiv": "https://python.langchain.com/v0.2/docs/integrations/providers/arxiv/"}, "QdrantSparseVectorRetriever": {"Qdrant Sparse Vector": "https://python.langchain.com/v0.2/docs/integrations/retrievers/qdrant-sparse/"}, "DeterministicFakeEmbedding": {"ElasticsearchRetriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/elasticsearch_retriever/"}, "Embeddings": {"ElasticsearchRetriever": "https://python.langchain.com/v0.2/docs/integrations/retrievers/elasticsearch_retriever/", "Infinispan": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/infinispanvs/"}, "RememberizerRetriever": {"Rememberizer": "https://python.langchain.com/v0.2/docs/integrations/retrievers/rememberizer/"}, "ArceeRetriever": {"Arcee": "https://python.langchain.com/v0.2/docs/integrations/providers/arcee/"}, "FlashrankRerank": {"FlashRank reranker": "https://python.langchain.com/v0.2/docs/integrations/retrievers/flashrank-reranker/", "UpTrain": 
"https://python.langchain.com/v0.2/docs/integrations/callbacks/uptrain/"}, "ChaindeskRetriever": {"Chaindesk": "https://python.langchain.com/v0.2/docs/integrations/providers/chaindesk/"}, "MergerRetriever": {"LOTR (Merger Retriever)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/merger_retriever/"}, "EmbeddingsClusteringFilter": {"LOTR (Merger Retriever)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/merger_retriever/"}, "TFIDFRetriever": {"TF-IDF": "https://python.langchain.com/v0.2/docs/integrations/retrievers/tf_idf/"}, "PGVector": {"PGVector (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/pgvector_self_query/", "PGVector": "https://python.langchain.com/v0.2/docs/integrations/providers/pgvector/"}, "Weaviate": {"Weaviate": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/weaviate_self_query/"}, "Vectara": {"Vectara self-querying ": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/vectara_self_query/", "Vectara": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vectara/", "Vectara Chat": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_chat/"}, "HanaDB": {"SAP HANA Cloud Vector Engine": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sap_hanavector/", "SAP": "https://python.langchain.com/v0.2/docs/integrations/providers/sap/"}, "HanaTranslator": {"SAP HANA Cloud Vector Engine": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/hanavector_self_query/"}, "DashVector": {"DashVector": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/dashvector/"}, "Tongyi": {"DashVector": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/dashvector/", "Alibaba Cloud": "https://python.langchain.com/v0.2/docs/integrations/providers/alibaba_cloud/", "Tongyi Qwen": "https://python.langchain.com/v0.2/docs/integrations/llms/tongyi/"}, "DatabricksVectorSearch": {"Databricks Vector Search": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/databricks_vector_search/"}, "Dingo": {"DingoDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/dingo/"}, "OpenSearchVectorSearch": {"OpenSearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/opensearch/", "AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/"}, "ConnectionParams": {"Tencent Cloud VectorDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tencentvectordb/", "Baidu VectorDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/baiduvectordb/"}, "MetaField": {"Tencent Cloud VectorDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tencentvectordb/"}, "TencentVectorDB": {"Tencent Cloud VectorDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tencentvectordb/", "Tencent": "https://python.langchain.com/v0.2/docs/integrations/providers/tencent/"}, "TimescaleVector": {"Timescale Vector (Postgres) ": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/timescalevector_self_query/", "Timescale Vector (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/timescalevector/"}, "AstraDB": {"Astra DB (Cassandra)": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/astradb/"}, "SupabaseVectorStore": {"Supabase (Postgres)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/supabase/"}, 
"Redis": {"Redis": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/redis/"}, "MyScale": {"MyScale": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/myscale/"}, "MongoDBAtlasVectorSearch": {"MongoDB Atlas": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/mongodb_atlas/"}, "Qdrant": {"Qdrant": "https://python.langchain.com/v0.2/docs/integrations/retrievers/self_query/qdrant_self_query/", "Identity-enabled RAG using PebbloRetrievalQA": "https://python.langchain.com/v0.2/docs/integrations/providers/pebblo/pebblo_retrieval_qa/"}, "AzureChatOpenAI": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "OpenAI": "https://python.langchain.com/v0.2/docs/integrations/platforms/openai/", "Bing Search": "https://python.langchain.com/v0.2/docs/integrations/tools/bing_search/", "AzureChatOpenAI": "https://python.langchain.com/v0.2/docs/integrations/chat/azure_chat_openai/", "Azure Cosmos DB for Apache Gremlin": "https://python.langchain.com/v0.2/docs/integrations/graphs/azure_cosmosdb_gremlin/"}, "AzureMLOnlineEndpoint": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "Azure ML": "https://python.langchain.com/v0.2/docs/integrations/llms/azure_ml/"}, "AzureOpenAI": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "OpenAI": "https://python.langchain.com/v0.2/docs/integrations/platforms/openai/", "Azure OpenAI": "https://python.langchain.com/v0.2/docs/integrations/llms/azure_openai/"}, "AzureAIDataLoader": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "Azure AI Data": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/azure_ai_data/"}, "AzureBlobStorageContainerLoader": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "Azure Blob Storage Container": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/azure_blob_storage_container/"}, "AzureBlobStorageFileLoader": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "Azure Blob Storage File": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/azure_blob_storage_file/"}, "OneDriveLoader": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "Microsoft OneDrive": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/microsoft_onedrive/"}, "OneDriveFileLoader": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/"}, "UnstructuredWordDocumentLoader": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "Microsoft Word": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/microsoft_word/"}, "UnstructuredExcelLoader": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "Microsoft Excel": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/microsoft_excel/"}, "SharePointLoader": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "Microsoft SharePoint": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/microsoft_sharepoint/"}, "UnstructuredPowerPointLoader": {"Microsoft": 
"https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "Microsoft PowerPoint": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/microsoft_powerpoint/"}, "OneNoteLoader": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "Microsoft OneNote": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/microsoft_onenote/"}, "AzureCosmosDBVectorSearch": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "Azure Cosmos DB Mongo vCore": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azure_cosmos_db/"}, "BingSearchResults": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "Bing Search": "https://python.langchain.com/v0.2/docs/integrations/tools/bing_search/"}, "BingSearchAPIWrapper": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "Bing Search": "https://python.langchain.com/v0.2/docs/integrations/tools/bing_search/"}, "O365Toolkit": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "Office365 Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/office365/"}, "PowerBIToolkit": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "PowerBI Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/powerbi/"}, "PowerBIDataset": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "PowerBI Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/powerbi/"}, "PlayWrightBrowserToolkit": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "PlayWright Browser Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/playwright/"}, "GremlinGraph": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "Azure Cosmos DB for Apache Gremlin": "https://python.langchain.com/v0.2/docs/integrations/graphs/azure_cosmosdb_gremlin/"}, "GraphDocument": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "Azure Cosmos DB for Apache Gremlin": "https://python.langchain.com/v0.2/docs/integrations/graphs/azure_cosmosdb_gremlin/"}, "Node": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "Azure Cosmos DB for Apache Gremlin": "https://python.langchain.com/v0.2/docs/integrations/graphs/azure_cosmosdb_gremlin/"}, "Relationship": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/", "Azure Cosmos DB for Apache Gremlin": "https://python.langchain.com/v0.2/docs/integrations/graphs/azure_cosmosdb_gremlin/"}, "PresidioAnonymizer": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/"}, "PresidioReversibleAnonymizer": {"Microsoft": "https://python.langchain.com/v0.2/docs/integrations/platforms/microsoft/"}, "BedrockLLM": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "Bedrock": "https://python.langchain.com/v0.2/docs/integrations/llms/bedrock/"}, "AmazonAPIGateway": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "Amazon API Gateway": "https://python.langchain.com/v0.2/docs/integrations/llms/amazon_api_gateway/"}, "SagemakerEndpoint": {"AWS": 
"https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "SageMakerEndpoint": "https://python.langchain.com/v0.2/docs/integrations/llms/sagemaker/"}, "ContentHandlerBase": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/"}, "S3DirectoryLoader": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "AWS S3 Directory": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/aws_s3_directory/"}, "S3FileLoader": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "AWS S3 File": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/aws_s3_file/"}, "AmazonTextractPDFLoader": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "Amazon Textract ": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/amazon_textract/"}, "AthenaLoader": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "Athena": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/athena/"}, "GlueCatalogLoader": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "Glue Catalog": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/glue_catalog/"}, "DynamoDBChatMessageHistory": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "AWS DynamoDB": "https://python.langchain.com/v0.2/docs/integrations/memory/aws_dynamodb/"}, "NeptuneGraph": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "Amazon Neptune with Cypher": "https://python.langchain.com/v0.2/docs/integrations/graphs/amazon_neptune_open_cypher/"}, "NeptuneAnalyticsGraph": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "Amazon Neptune with Cypher": "https://python.langchain.com/v0.2/docs/integrations/graphs/amazon_neptune_open_cypher/"}, "NeptuneOpenCypherQAChain": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "Amazon Neptune with Cypher": "https://python.langchain.com/v0.2/docs/integrations/graphs/amazon_neptune_open_cypher/"}, "NeptuneRdfGraph": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "Amazon Neptune with SPARQL": "https://python.langchain.com/v0.2/docs/integrations/graphs/amazon_neptune_sparql/"}, "NeptuneSparqlQAChain": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "Amazon Neptune with SPARQL": "https://python.langchain.com/v0.2/docs/integrations/graphs/amazon_neptune_sparql/"}, "SageMakerCallbackHandler": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/", "SageMaker Tracking": "https://python.langchain.com/v0.2/docs/integrations/callbacks/sagemaker_tracking/"}, "AmazonComprehendModerationChain": {"AWS": "https://python.langchain.com/v0.2/docs/integrations/platforms/aws/"}, "HuggingFaceHubEmbeddings": {"Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/platforms/huggingface/"}, "HuggingFaceDatasetLoader": {"Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/platforms/huggingface/", "HuggingFace dataset": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/hugging_face_dataset/"}, "load_huggingface_tool": {"Hugging Face": "https://python.langchain.com/v0.2/docs/integrations/platforms/huggingface/", "HuggingFace Hub Tools": "https://python.langchain.com/v0.2/docs/integrations/tools/huggingface_tools/"}, "ChatGPTLoader": {"OpenAI": 
"https://python.langchain.com/v0.2/docs/integrations/platforms/openai/", "ChatGPT Data": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/chatgpt_loader/"}, "DallEAPIWrapper": {"OpenAI": "https://python.langchain.com/v0.2/docs/integrations/platforms/openai/", "Dall-E Image Generator": "https://python.langchain.com/v0.2/docs/integrations/tools/dalle_image_generator/"}, "OpenAIModerationChain": {"OpenAI": "https://python.langchain.com/v0.2/docs/integrations/platforms/openai/"}, "GooglePalmEmbeddings": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/"}, "ScaNN": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "ScaNN": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/scann/"}, "GoogleVertexAISearchRetriever": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/"}, "GoogleDocumentAIWarehouseRetriever": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/"}, "GoogleFinanceQueryRun": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "Google Finance": "https://python.langchain.com/v0.2/docs/integrations/tools/google_finance/"}, "GoogleFinanceAPIWrapper": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "Google Finance": "https://python.langchain.com/v0.2/docs/integrations/tools/google_finance/"}, "GoogleJobsQueryRun": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "Google Jobs": "https://python.langchain.com/v0.2/docs/integrations/tools/google_jobs/"}, "GoogleLensQueryRun": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "Google Lens": "https://python.langchain.com/v0.2/docs/integrations/tools/google_lens/"}, "GoogleLensAPIWrapper": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "Google Lens": "https://python.langchain.com/v0.2/docs/integrations/tools/google_lens/"}, "GooglePlacesTool": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "Google Places": "https://python.langchain.com/v0.2/docs/integrations/tools/google_places/"}, "GoogleScholarQueryRun": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "Google Scholar": "https://python.langchain.com/v0.2/docs/integrations/tools/google_scholar/"}, "GoogleScholarAPIWrapper": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "Google Scholar": "https://python.langchain.com/v0.2/docs/integrations/tools/google_scholar/"}, "GoogleTrendsQueryRun": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "Google Trends": "https://python.langchain.com/v0.2/docs/integrations/tools/google_trends/"}, "GoogleTrendsAPIWrapper": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "Google Trends": "https://python.langchain.com/v0.2/docs/integrations/tools/google_trends/"}, "SearchApiAPIWrapper": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "SearchApi": "https://python.langchain.com/v0.2/docs/integrations/providers/searchapi/"}, "SerpAPIWrapper": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "SerpAPI": "https://python.langchain.com/v0.2/docs/integrations/providers/serpapi/", "MLX": "https://python.langchain.com/v0.2/docs/integrations/chat/mlx/"}, "GoogleSerperAPIWrapper": {"Google": 
"https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "Google Serper": "https://python.langchain.com/v0.2/docs/integrations/tools/google_serper/", "Serper - Google Search API": "https://python.langchain.com/v0.2/docs/integrations/providers/google_serper/"}, "YouTubeSearchTool": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "YouTube": "https://python.langchain.com/v0.2/docs/integrations/tools/youtube/"}, "YoutubeAudioLoader": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "YouTube audio": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/youtube_audio/"}, "OpenAIWhisperParser": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "YouTube audio": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/youtube_audio/"}, "YoutubeLoader": {"Google": "https://python.langchain.com/v0.2/docs/integrations/platforms/google/", "YouTube": "https://python.langchain.com/v0.2/docs/integrations/providers/youtube/", "YouTube transcripts": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/youtube_transcript/", "Build a Query Analysis System": "https://python.langchain.com/v0.2/docs/tutorials/query_analysis/"}, "AnthropicLLM": {"Anthropic": "https://python.langchain.com/v0.2/docs/integrations/platforms/anthropic/", "AnthropicLLM": "https://python.langchain.com/v0.2/docs/integrations/llms/anthropic/"}, "AIPluginTool": {"ChatGPT Plugins": "https://python.langchain.com/v0.2/docs/integrations/tools/chatgpt_plugins/"}, "AgentType": {"ChatGPT Plugins": "https://python.langchain.com/v0.2/docs/integrations/tools/chatgpt_plugins/", "Connery Toolkit and Tools": "https://python.langchain.com/v0.2/docs/integrations/tools/connery/", "Jira Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/jira/", "Google Jobs": "https://python.langchain.com/v0.2/docs/integrations/tools/google_jobs/", "Google Serper": "https://python.langchain.com/v0.2/docs/integrations/tools/google_serper/", "Azure Cognitive Services Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/azure_cognitive_services/", "E2B Data Analysis": "https://python.langchain.com/v0.2/docs/integrations/tools/e2b_data_analysis/", "Human as a tool": "https://python.langchain.com/v0.2/docs/integrations/tools/human_tools/", "Natural Language API Toolkits": "https://python.langchain.com/v0.2/docs/integrations/tools/openapi_nla/", "Steam Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/steam/", "Yahoo Finance News": "https://python.langchain.com/v0.2/docs/integrations/tools/yahoo_finance_news/", "Google Finance": "https://python.langchain.com/v0.2/docs/integrations/tools/google_finance/", "ClickUp Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/clickup/", "AWS Lambda": "https://python.langchain.com/v0.2/docs/integrations/tools/awslambda/", "Google Drive": "https://python.langchain.com/v0.2/docs/integrations/tools/google_drive/", "OpenWeatherMap": "https://python.langchain.com/v0.2/docs/integrations/tools/openweathermap/", "AINetwork Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/ainetwork/", "Memorize": "https://python.langchain.com/v0.2/docs/integrations/tools/memorize/", "PlayWright Browser Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/playwright/", "Eleven Labs Text2Speech": "https://python.langchain.com/v0.2/docs/integrations/tools/eleven_labs_tts/", "Office365 Toolkit": 
"https://python.langchain.com/v0.2/docs/integrations/tools/office365/", "Bearly Code Interpreter": "https://python.langchain.com/v0.2/docs/integrations/tools/bearly/", "Pandas Dataframe": "https://python.langchain.com/v0.2/docs/integrations/tools/pandas/", "Zapier Natural Language Actions": "https://python.langchain.com/v0.2/docs/integrations/tools/zapier/", "NASA Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/nasa/", "Connery Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/connery_toolkit/", "GraphQL": "https://python.langchain.com/v0.2/docs/integrations/tools/graphql/", "SearchApi": "https://python.langchain.com/v0.2/docs/integrations/providers/searchapi/", "Eden AI": "https://python.langchain.com/v0.2/docs/integrations/tools/edenai_tools/", "Gitlab Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/gitlab/", "Shell (bash)": "https://python.langchain.com/v0.2/docs/integrations/tools/bash/", "Zep Open Source Memory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory/", "Xata": "https://python.langchain.com/v0.2/docs/integrations/memory/xata_chat_message_history/", "Zep Cloud Memory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory_cloud/", "LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/callbacks/llmonitor/", "Comet Tracing": "https://python.langchain.com/v0.2/docs/integrations/callbacks/comet_tracing/", "Argilla": "https://python.langchain.com/v0.2/docs/integrations/callbacks/argilla/", "Aim": "https://python.langchain.com/v0.2/docs/integrations/providers/aim_tracking/", "Weights & Biases": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracking/", "MLflow": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_tracking/", "Serper - Google Search API": "https://python.langchain.com/v0.2/docs/integrations/providers/google_serper/", "Flyte": "https://python.langchain.com/v0.2/docs/integrations/providers/flyte/", "WandB Tracing": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracing/", "ClearML": "https://python.langchain.com/v0.2/docs/integrations/providers/clearml_tracking/", "Amazon API Gateway": "https://python.langchain.com/v0.2/docs/integrations/llms/amazon_api_gateway/"}, "initialize_agent": {"ChatGPT Plugins": "https://python.langchain.com/v0.2/docs/integrations/tools/chatgpt_plugins/", "Connery Toolkit and Tools": "https://python.langchain.com/v0.2/docs/integrations/tools/connery/", "Jira Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/jira/", "Google Jobs": "https://python.langchain.com/v0.2/docs/integrations/tools/google_jobs/", "Google Serper": "https://python.langchain.com/v0.2/docs/integrations/tools/google_serper/", "Azure Cognitive Services Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/azure_cognitive_services/", "E2B Data Analysis": "https://python.langchain.com/v0.2/docs/integrations/tools/e2b_data_analysis/", "Human as a tool": "https://python.langchain.com/v0.2/docs/integrations/tools/human_tools/", "Natural Language API Toolkits": "https://python.langchain.com/v0.2/docs/integrations/tools/openapi_nla/", "Steam Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/steam/", "Yahoo Finance News": "https://python.langchain.com/v0.2/docs/integrations/tools/yahoo_finance_news/", "Google Finance": "https://python.langchain.com/v0.2/docs/integrations/tools/google_finance/", "ClickUp Toolkit": 
"https://python.langchain.com/v0.2/docs/integrations/tools/clickup/", "AWS Lambda": "https://python.langchain.com/v0.2/docs/integrations/tools/awslambda/", "Google Drive": "https://python.langchain.com/v0.2/docs/integrations/tools/google_drive/", "OpenWeatherMap": "https://python.langchain.com/v0.2/docs/integrations/tools/openweathermap/", "AINetwork Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/ainetwork/", "Memorize": "https://python.langchain.com/v0.2/docs/integrations/tools/memorize/", "PlayWright Browser Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/playwright/", "Eleven Labs Text2Speech": "https://python.langchain.com/v0.2/docs/integrations/tools/eleven_labs_tts/", "Office365 Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/office365/", "Bearly Code Interpreter": "https://python.langchain.com/v0.2/docs/integrations/tools/bearly/", "Zapier Natural Language Actions": "https://python.langchain.com/v0.2/docs/integrations/tools/zapier/", "NASA Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/nasa/", "Connery Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/connery_toolkit/", "GraphQL": "https://python.langchain.com/v0.2/docs/integrations/tools/graphql/", "SearchApi": "https://python.langchain.com/v0.2/docs/integrations/providers/searchapi/", "Gradio": "https://python.langchain.com/v0.2/docs/integrations/tools/gradio_tools/", "SceneXplain": "https://python.langchain.com/v0.2/docs/integrations/tools/sceneXplain/", "Eden AI": "https://python.langchain.com/v0.2/docs/integrations/tools/edenai_tools/", "Dall-E Image Generator": "https://python.langchain.com/v0.2/docs/integrations/tools/dalle_image_generator/", "Gitlab Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/gitlab/", "Shell (bash)": "https://python.langchain.com/v0.2/docs/integrations/tools/bash/", "Zep Open Source Memory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory/", "Xata": "https://python.langchain.com/v0.2/docs/integrations/memory/xata_chat_message_history/", "Zep Cloud Memory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory_cloud/", "LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/callbacks/llmonitor/", "SageMaker Tracking": "https://python.langchain.com/v0.2/docs/integrations/callbacks/sagemaker_tracking/", "Comet Tracing": "https://python.langchain.com/v0.2/docs/integrations/callbacks/comet_tracing/", "Argilla": "https://python.langchain.com/v0.2/docs/integrations/callbacks/argilla/", "Comet": "https://python.langchain.com/v0.2/docs/integrations/providers/comet_tracking/", "Aim": "https://python.langchain.com/v0.2/docs/integrations/providers/aim_tracking/", "Weights & Biases": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracking/", "MLflow": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_tracking/", "Serper - Google Search API": "https://python.langchain.com/v0.2/docs/integrations/providers/google_serper/", "Flyte": "https://python.langchain.com/v0.2/docs/integrations/providers/flyte/", "WandB Tracing": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracing/", "ClearML": "https://python.langchain.com/v0.2/docs/integrations/providers/clearml_tracking/", "Amazon API Gateway": "https://python.langchain.com/v0.2/docs/integrations/llms/amazon_api_gateway/"}, "DataForSeoAPIWrapper": {"DataForSEO": "https://python.langchain.com/v0.2/docs/integrations/providers/dataforseo/"}, 
"Tool": {"DataForSEO": "https://python.langchain.com/v0.2/docs/integrations/tools/dataforseo/", "Python REPL": "https://python.langchain.com/v0.2/docs/integrations/tools/python/", "Google Serper": "https://python.langchain.com/v0.2/docs/integrations/tools/google_serper/", "SerpAPI": "https://python.langchain.com/v0.2/docs/integrations/tools/serpapi/", "SearchApi": "https://python.langchain.com/v0.2/docs/integrations/providers/searchapi/", "Google Search": "https://python.langchain.com/v0.2/docs/integrations/tools/google_search/", "Reddit Search ": "https://python.langchain.com/v0.2/docs/integrations/tools/reddit_search/", "Ionic Shopping Tool": "https://python.langchain.com/v0.2/docs/integrations/tools/ionic_shopping/", "Zep Open Source Memory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory/", "Zep Cloud Memory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_memory_cloud/", "Serper - Google Search API": "https://python.langchain.com/v0.2/docs/integrations/providers/google_serper/", "Bittensor": "https://python.langchain.com/v0.2/docs/integrations/llms/bittensor/"}, "ConneryToolkit": {"Connery Toolkit and Tools": "https://python.langchain.com/v0.2/docs/integrations/tools/connery/", "Connery Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/connery_toolkit/"}, "ConneryService": {"Connery Toolkit and Tools": "https://python.langchain.com/v0.2/docs/integrations/tools/connery/", "Connery Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/connery_toolkit/"}, "DataheraldAPIWrapper": {"Dataherald": "https://python.langchain.com/v0.2/docs/integrations/providers/dataherald/"}, "SearxSearchWrapper": {"SearxNG Search": "https://python.langchain.com/v0.2/docs/integrations/tools/searx_search/", "SearxNG Search API": "https://python.langchain.com/v0.2/docs/integrations/providers/searx/"}, "JiraToolkit": {"Jira Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/jira/"}, "JiraAPIWrapper": {"Jira Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/jira/"}, "PythonREPL": {"Python REPL": "https://python.langchain.com/v0.2/docs/integrations/tools/python/"}, "GoogleJobsAPIWrapper": {"Google Jobs": "https://python.langchain.com/v0.2/docs/integrations/tools/google_jobs/"}, "InfobipAPIWrapper": {"Infobip": "https://python.langchain.com/v0.2/docs/integrations/tools/infobip/"}, "create_openai_functions_agent": {"Infobip": "https://python.langchain.com/v0.2/docs/integrations/tools/infobip/", "AskNews": "https://python.langchain.com/v0.2/docs/integrations/tools/asknews/", "Polygon IO Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/polygon_toolkit/", "Semantic Scholar API Tool": "https://python.langchain.com/v0.2/docs/integrations/tools/semanticscholar/", "Passio NutritionAI": "https://python.langchain.com/v0.2/docs/integrations/tools/passio_nutrition_ai/", "MultiOn Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/multion/", "You.com Search": "https://python.langchain.com/v0.2/docs/integrations/tools/you/", "Polygon IO Toolkit and Tools": "https://python.langchain.com/v0.2/docs/integrations/tools/polygon/"}, "AskNewsSearch": {"AskNews": "https://python.langchain.com/v0.2/docs/integrations/tools/asknews/"}, "create_pbi_agent": {"PowerBI Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/powerbi/"}, "AzureCognitiveServicesToolkit": {"Azure Cognitive Services Toolkit": 
"https://python.langchain.com/v0.2/docs/integrations/tools/azure_cognitive_services/"}, "E2BDataAnalysisTool": {"E2B Data Analysis": "https://python.langchain.com/v0.2/docs/integrations/tools/e2b_data_analysis/"}, "SQLDatabaseToolkit": {"SQLDatabase Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/sql_database/", "CnosDB": "https://python.langchain.com/v0.2/docs/integrations/providers/cnosdb/", "Build a Question/Answering system over SQL data": "https://python.langchain.com/v0.2/docs/tutorials/sql_qa/"}, "HumanInputRun": {"Human as a tool": "https://python.langchain.com/v0.2/docs/integrations/tools/human_tools/"}, "FinancialDatasetsToolkit": {"FinancialDatasets Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/financial_datasets/"}, "FinancialDatasetsAPIWrapper": {"FinancialDatasets Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/financial_datasets/"}, "NLAToolkit": {"Natural Language API Toolkits": "https://python.langchain.com/v0.2/docs/integrations/tools/openapi_nla/"}, "Requests": {"Natural Language API Toolkits": "https://python.langchain.com/v0.2/docs/integrations/tools/openapi_nla/"}, "ZenGuardTool": {"ZenGuard AI": "https://python.langchain.com/v0.2/docs/integrations/tools/zenguard/"}, "Detector": {"ZenGuard AI": "https://python.langchain.com/v0.2/docs/integrations/tools/zenguard/"}, "SlackToolkit": {"Slack Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/slack/", "Slack": "https://python.langchain.com/v0.2/docs/integrations/providers/slack/"}, "SteamToolkit": {"Steam Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/steam/"}, "SteamWebAPIWrapper": {"Steam Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/steam/"}, "create_openai_tools_agent": {"Cassandra Database Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/cassandra_database/", "Log, Trace, and Monitor": "https://python.langchain.com/v0.2/docs/integrations/providers/portkey/logging_tracing_portkey/", "Portkey": "https://python.langchain.com/v0.2/docs/integrations/providers/portkey/index/"}, "CassandraDatabaseToolkit": {"Cassandra Database Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/cassandra_database/", "Cassandra": "https://python.langchain.com/v0.2/docs/integrations/providers/cassandra/"}, "CassandraDatabase": {"Cassandra Database Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/cassandra_database/"}, "NucliaUnderstandingAPI": {"Nuclia Understanding": "https://python.langchain.com/v0.2/docs/integrations/tools/nuclia/", "Nuclia": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/nuclia/"}, "YahooFinanceNewsTool": {"Yahoo Finance News": "https://python.langchain.com/v0.2/docs/integrations/tools/yahoo_finance_news/"}, "JsonToolkit": {"JSON Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/json/"}, "create_json_agent": {"JSON Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/json/"}, "JsonSpec": {"JSON Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/json/", "OpenAPI Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/openapi/"}, "PolygonToolkit": {"Polygon IO Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/polygon_toolkit/", "Polygon IO Toolkit and Tools": "https://python.langchain.com/v0.2/docs/integrations/tools/polygon/"}, "PolygonAPIWrapper": {"Polygon IO Toolkit": 
"https://python.langchain.com/v0.2/docs/integrations/tools/polygon_toolkit/", "Polygon IO Toolkit and Tools": "https://python.langchain.com/v0.2/docs/integrations/tools/polygon/"}, "WikidataAPIWrapper": {"Wikidata": "https://python.langchain.com/v0.2/docs/integrations/tools/wikidata/"}, "WikidataQueryRun": {"Wikidata": "https://python.langchain.com/v0.2/docs/integrations/tools/wikidata/"}, "TwilioAPIWrapper": {"Twilio": "https://python.langchain.com/v0.2/docs/integrations/tools/twilio/"}, "IFTTTWebhook": {"IFTTT WebHooks": "https://python.langchain.com/v0.2/docs/integrations/tools/ifttt/"}, "SemanticScholarQueryRun": {"Semantic Scholar API Tool": "https://python.langchain.com/v0.2/docs/integrations/tools/semanticscholar/"}, "AlphaVantageAPIWrapper": {"Alpha Vantage": "https://python.langchain.com/v0.2/docs/integrations/tools/alpha_vantage/"}, "GitHubToolkit": {"Github Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/github/"}, "GitHubAPIWrapper": {"Github Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/github/"}, "ChatDatabricks": {"Databricks Unity Catalog (UC)": "https://python.langchain.com/v0.2/docs/integrations/tools/databricks/", "ChatDatabricks": "https://python.langchain.com/v0.2/docs/integrations/chat/databricks/"}, "UCFunctionToolkit": {"Databricks Unity Catalog (UC)": "https://python.langchain.com/v0.2/docs/integrations/tools/databricks/"}, "GoogleCloudTextToSpeechTool": {"Google Cloud Text-to-Speech": "https://python.langchain.com/v0.2/docs/integrations/tools/google_cloud_texttospeech/"}, "ClickupToolkit": {"ClickUp Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/clickup/"}, "ClickupAPIWrapper": {"ClickUp Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/clickup/"}, "SparkSQLToolkit": {"Spark SQL Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/spark_sql/"}, "create_spark_sql_agent": {"Spark SQL Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/spark_sql/"}, "SparkSQL": {"Spark SQL Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/spark_sql/"}, "OracleSummary": {"Oracle AI Vector Search: Generate Summary": "https://python.langchain.com/v0.2/docs/integrations/tools/oracleai/", "OracleAI Vector Search": "https://python.langchain.com/v0.2/docs/integrations/providers/oracleai/"}, "StackExchangeAPIWrapper": {"StackExchange": "https://python.langchain.com/v0.2/docs/integrations/tools/stackexchange/", "Stack Exchange": "https://python.langchain.com/v0.2/docs/integrations/providers/stackexchange/"}, "RequestsToolkit": {"Requests Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/requests/"}, "TextRequestsWrapper": {"Requests Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/requests/"}, "OpenWeatherMapAPIWrapper": {"OpenWeatherMap": "https://python.langchain.com/v0.2/docs/integrations/providers/openweathermap/"}, "AINetworkToolkit": {"AINetwork Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/ainetwork/", "AINetwork": "https://python.langchain.com/v0.2/docs/integrations/providers/ainetwork/"}, "get_from_env": {"Passio NutritionAI": "https://python.langchain.com/v0.2/docs/integrations/tools/passio_nutrition_ai/"}, "NutritionAI": {"Passio NutritionAI": "https://python.langchain.com/v0.2/docs/integrations/tools/passio_nutrition_ai/"}, "NutritionAIAPI": {"Passio NutritionAI": "https://python.langchain.com/v0.2/docs/integrations/tools/passio_nutrition_ai/"}, "PubmedQueryRun": {"PubMed": 
"https://python.langchain.com/v0.2/docs/integrations/tools/pubmed/"}, "GradientLLM": {"Memorize": "https://python.langchain.com/v0.2/docs/integrations/tools/memorize/", "Gradient": "https://python.langchain.com/v0.2/docs/integrations/llms/gradient/"}, "create_async_playwright_browser": {"PlayWright Browser Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/playwright/"}, "ElevenLabsText2SpeechTool": {"Eleven Labs Text2Speech": "https://python.langchain.com/v0.2/docs/integrations/tools/eleven_labs_tts/", "ElevenLabs": "https://python.langchain.com/v0.2/docs/integrations/providers/elevenlabs/"}, "create_conversational_retrieval_agent": {"Cogniswitch Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/cogniswitch/"}, "CogniswitchToolkit": {"Cogniswitch Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/cogniswitch/"}, "BearlyInterpreterTool": {"Bearly Code Interpreter": "https://python.langchain.com/v0.2/docs/integrations/tools/bearly/"}, "ExecPython": {"Riza Code Interpreter": "https://python.langchain.com/v0.2/docs/integrations/tools/riza/"}, "ZapierToolkit": {"Zapier Natural Language Actions": "https://python.langchain.com/v0.2/docs/integrations/tools/zapier/"}, "ZapierNLAWrapper": {"Zapier Natural Language Actions": "https://python.langchain.com/v0.2/docs/integrations/tools/zapier/"}, "SimpleSequentialChain": {"Zapier Natural Language Actions": "https://python.langchain.com/v0.2/docs/integrations/tools/zapier/", "SageMaker Tracking": "https://python.langchain.com/v0.2/docs/integrations/callbacks/sagemaker_tracking/", "Rebuff": "https://python.langchain.com/v0.2/docs/integrations/providers/rebuff/", "Predibase": "https://python.langchain.com/v0.2/docs/integrations/llms/predibase/", "Eden AI": "https://python.langchain.com/v0.2/docs/integrations/llms/edenai/", "Replicate": "https://python.langchain.com/v0.2/docs/integrations/llms/replicate/"}, "TransformChain": {"Zapier Natural Language Actions": "https://python.langchain.com/v0.2/docs/integrations/tools/zapier/", "Rebuff": "https://python.langchain.com/v0.2/docs/integrations/providers/rebuff/"}, "ZapierNLARunAction": {"Zapier Natural Language Actions": "https://python.langchain.com/v0.2/docs/integrations/tools/zapier/"}, "RivaASR": {"NVIDIA Riva: ASR and TTS": "https://python.langchain.com/v0.2/docs/integrations/tools/nvidia_riva/"}, "RivaTTS": {"NVIDIA Riva: ASR and TTS": "https://python.langchain.com/v0.2/docs/integrations/tools/nvidia_riva/"}, "RivaAudioEncoding": {"NVIDIA Riva: ASR and TTS": "https://python.langchain.com/v0.2/docs/integrations/tools/nvidia_riva/"}, "AudioStream": {"NVIDIA Riva: ASR and TTS": "https://python.langchain.com/v0.2/docs/integrations/tools/nvidia_riva/"}, "GoldenQueryAPIWrapper": {"Golden Query": "https://python.langchain.com/v0.2/docs/integrations/tools/golden_query/", "Golden": "https://python.langchain.com/v0.2/docs/integrations/providers/golden/"}, "create_react_agent": {"ArXiv": "https://python.langchain.com/v0.2/docs/integrations/tools/arxiv/", "Amadeus Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/amadeus/", "Ionic Shopping Tool": "https://python.langchain.com/v0.2/docs/integrations/tools/ionic_shopping/", "Streamlit": "https://python.langchain.com/v0.2/docs/integrations/callbacks/streamlit/", "Dataherald": "https://python.langchain.com/v0.2/docs/integrations/providers/dataherald/", "Bittensor": "https://python.langchain.com/v0.2/docs/integrations/llms/bittensor/"}, "ArxivAPIWrapper": {"ArXiv": 
"https://python.langchain.com/v0.2/docs/integrations/tools/arxiv/"}, "OpenAIFunctionsAgent": {"Robocorp Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/robocorp/", "Exa Search": "https://python.langchain.com/v0.2/docs/integrations/tools/exa_search/", "LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/callbacks/llmonitor/"}, "NasaToolkit": {"NASA Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/nasa/"}, "NasaAPIWrapper": {"NASA Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/nasa/"}, "MultionToolkit": {"MultiOn Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/multion/"}, "DuckDuckGoSearchRun": {"DuckDuckGo Search": "https://python.langchain.com/v0.2/docs/integrations/tools/ddg/"}, "DuckDuckGoSearchResults": {"DuckDuckGo Search": "https://python.langchain.com/v0.2/docs/integrations/tools/ddg/"}, "DuckDuckGoSearchAPIWrapper": {"DuckDuckGo Search": "https://python.langchain.com/v0.2/docs/integrations/tools/ddg/"}, "SceneXplainTool": {"SceneXplain": "https://python.langchain.com/v0.2/docs/integrations/tools/sceneXplain/"}, "WolframAlphaAPIWrapper": {"Wolfram Alpha": "https://python.langchain.com/v0.2/docs/integrations/providers/wolfram_alpha/"}, "AmadeusToolkit": {"Amadeus Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/amadeus/"}, "HuggingFaceHub": {"Amadeus Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/amadeus/"}, "ReActJsonSingleInputOutputParser": {"Amadeus Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/amadeus/", "MLX": "https://python.langchain.com/v0.2/docs/integrations/chat/mlx/"}, "render_text_description_and_args": {"Amadeus Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/amadeus/"}, "EdenAiExplicitImageTool": {"Eden AI": "https://python.langchain.com/v0.2/docs/integrations/providers/edenai/"}, "EdenAiObjectDetectionTool": {"Eden AI": "https://python.langchain.com/v0.2/docs/integrations/providers/edenai/"}, "EdenAiParsingIDTool": {"Eden AI": "https://python.langchain.com/v0.2/docs/integrations/providers/edenai/"}, "EdenAiParsingInvoiceTool": {"Eden AI": "https://python.langchain.com/v0.2/docs/integrations/providers/edenai/"}, "EdenAiSpeechToTextTool": {"Eden AI": "https://python.langchain.com/v0.2/docs/integrations/providers/edenai/"}, "EdenAiTextModerationTool": {"Eden AI": "https://python.langchain.com/v0.2/docs/integrations/providers/edenai/"}, "EdenAiTextToSpeechTool": {"Eden AI": "https://python.langchain.com/v0.2/docs/integrations/providers/edenai/"}, "EdenAI": {"Eden AI": "https://python.langchain.com/v0.2/docs/integrations/llms/edenai/"}, "MojeekSearch": {"Mojeek Search": "https://python.langchain.com/v0.2/docs/integrations/tools/mojeek_search/"}, "RedditSearchRun": {"Reddit Search ": "https://python.langchain.com/v0.2/docs/integrations/tools/reddit_search/"}, "RedditSearchAPIWrapper": {"Reddit Search ": "https://python.langchain.com/v0.2/docs/integrations/tools/reddit_search/"}, "RedditSearchSchema": {"Reddit Search ": "https://python.langchain.com/v0.2/docs/integrations/tools/reddit_search/"}, "StructuredChatAgent": {"Reddit Search ": "https://python.langchain.com/v0.2/docs/integrations/tools/reddit_search/"}, "ReadOnlySharedMemory": {"Reddit Search ": "https://python.langchain.com/v0.2/docs/integrations/tools/reddit_search/"}, "YouSearchTool": {"You.com Search": "https://python.langchain.com/v0.2/docs/integrations/tools/you/"}, "AzureAiServicesToolkit": {"Azure AI Services Toolkit": 
"https://python.langchain.com/v0.2/docs/integrations/tools/azure_ai_services/"}, "create_structured_chat_agent": {"Azure AI Services Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/azure_ai_services/"}, "reduce_openapi_spec": {"OpenAPI Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/openapi/"}, "RequestsWrapper": {"OpenAPI Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/openapi/"}, "OpenAPIToolkit": {"OpenAPI Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/openapi/"}, "create_openapi_agent": {"OpenAPI Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/openapi/"}, "GitLabToolkit": {"Gitlab Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/gitlab/"}, "GitLabAPIWrapper": {"Gitlab Toolkit": "https://python.langchain.com/v0.2/docs/integrations/tools/gitlab/"}, "ShellTool": {"Shell (bash)": "https://python.langchain.com/v0.2/docs/integrations/tools/bash/"}, "PolygonAggregates": {"Polygon IO Toolkit and Tools": "https://python.langchain.com/v0.2/docs/integrations/tools/polygon/"}, "PolygonFinancials": {"Polygon IO Toolkit and Tools": "https://python.langchain.com/v0.2/docs/integrations/tools/polygon/"}, "PolygonLastQuote": {"Polygon IO Toolkit and Tools": "https://python.langchain.com/v0.2/docs/integrations/tools/polygon/"}, "PolygonTickerNews": {"Polygon IO Toolkit and Tools": "https://python.langchain.com/v0.2/docs/integrations/tools/polygon/"}, "FileManagementToolkit": {"File System": "https://python.langchain.com/v0.2/docs/integrations/tools/filesystem/"}, "BraveSearch": {"Brave Search": "https://python.langchain.com/v0.2/docs/integrations/providers/brave_search/"}, "RedisChatMessageHistory": {"Redis": "https://python.langchain.com/v0.2/docs/integrations/memory/redis_chat_message_history/"}, "KafkaChatMessageHistory": {"Kafka": "https://python.langchain.com/v0.2/docs/integrations/memory/kafka_chat_message_history/"}, "ElasticsearchChatMessageHistory": {"Elasticsearch": "https://python.langchain.com/v0.2/docs/integrations/memory/elasticsearch_chat_message_history/"}, "UpstashRedisChatMessageHistory": {"Upstash Redis": "https://python.langchain.com/v0.2/docs/integrations/memory/upstash_redis_chat_message_history/", "Upstash Vector": "https://python.langchain.com/v0.2/docs/integrations/providers/upstash/"}, "ZepCloudChatMessageHistory": {"ZepCloudChatMessageHistory": "https://python.langchain.com/v0.2/docs/integrations/memory/zep_cloud_chat_message_history/", "Zep": "https://python.langchain.com/v0.2/docs/integrations/providers/zep/"}, "SingleStoreDBChatMessageHistory": {"SingleStoreDB": "https://python.langchain.com/v0.2/docs/integrations/providers/singlestoredb/"}, "PostgresChatMessageHistory": {"Postgres": "https://python.langchain.com/v0.2/docs/integrations/memory/postgres_chat_message_history/"}, "MomentoChatMessageHistory": {"Momento Cache": "https://python.langchain.com/v0.2/docs/integrations/memory/momento_chat_message_history/", "Momento": "https://python.langchain.com/v0.2/docs/integrations/providers/momento/"}, "XataChatMessageHistory": {"Xata": "https://python.langchain.com/v0.2/docs/integrations/providers/xata/"}, "XataVectorStore": {"Xata": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/xata/"}, "CassandraChatMessageHistory": {"Cassandra ": "https://python.langchain.com/v0.2/docs/integrations/memory/cassandra_chat_message_history/", "Cassandra": "https://python.langchain.com/v0.2/docs/integrations/providers/cassandra/"}, "MotorheadMemory": 
{"Mot\u00f6rhead": "https://python.langchain.com/v0.2/docs/integrations/memory/motorhead_memory/"}, "AstraDBChatMessageHistory": {"Astra DB ": "https://python.langchain.com/v0.2/docs/integrations/memory/astradb_chat_message_history/"}, "StreamlitChatMessageHistory": {"Streamlit": "https://python.langchain.com/v0.2/docs/integrations/providers/streamlit/"}, "Neo4jChatMessageHistory": {"Neo4j": "https://python.langchain.com/v0.2/docs/integrations/memory/neo4j_chat_message_history/"}, "TiDBChatMessageHistory": {"TiDB": "https://python.langchain.com/v0.2/docs/integrations/providers/tidb/"}, "RocksetChatMessageHistory": {"Rockset": "https://python.langchain.com/v0.2/docs/integrations/providers/rockset/"}, "ChatSnowflakeCortex": {"Snowflake Cortex": "https://python.langchain.com/v0.2/docs/integrations/chat/snowflake/"}, "SolarChat": {"# Related": "https://python.langchain.com/v0.2/docs/integrations/chat/solar/"}, "AzureMLChatOnlineEndpoint": {"AzureMLChatOnlineEndpoint": "https://python.langchain.com/v0.2/docs/integrations/chat/azureml_chat_endpoint/"}, "AzureMLEndpointApiType": {"AzureMLChatOnlineEndpoint": "https://python.langchain.com/v0.2/docs/integrations/chat/azureml_chat_endpoint/", "Azure ML": "https://python.langchain.com/v0.2/docs/integrations/llms/azure_ml/"}, "CustomOpenAIChatContentFormatter": {"AzureMLChatOnlineEndpoint": "https://python.langchain.com/v0.2/docs/integrations/chat/azureml_chat_endpoint/"}, "ChatKinetica": {"Kinetica Language To SQL Chat Model": "https://python.langchain.com/v0.2/docs/integrations/chat/kinetica/", "Kinetica": "https://python.langchain.com/v0.2/docs/integrations/providers/kinetica/"}, "KineticaSqlOutputParser": {"Kinetica Language To SQL Chat Model": "https://python.langchain.com/v0.2/docs/integrations/chat/kinetica/"}, "KineticaSqlResponse": {"Kinetica Language To SQL Chat Model": "https://python.langchain.com/v0.2/docs/integrations/chat/kinetica/"}, "PaiEasChatEndpoint": {"Alibaba Cloud PAI EAS": "https://python.langchain.com/v0.2/docs/integrations/chat/alibaba_cloud_pai_eas/", "Alibaba Cloud": "https://python.langchain.com/v0.2/docs/integrations/providers/alibaba_cloud/"}, "ChatCoze": {"Chat with Coze Bot": "https://python.langchain.com/v0.2/docs/integrations/chat/coze/"}, "ChatOctoAI": {"ChatOctoAI": "https://python.langchain.com/v0.2/docs/integrations/chat/octoai/", "OctoAI": "https://python.langchain.com/v0.2/docs/integrations/providers/octoai/"}, "ChatYi": {"ChatYI": "https://python.langchain.com/v0.2/docs/integrations/chat/yi/", "01.AI": "https://python.langchain.com/v0.2/docs/integrations/providers/yi/"}, "ChatDeepInfra": {"DeepInfra": "https://python.langchain.com/v0.2/docs/integrations/providers/deepinfra/"}, "ChatLiteLLM": {"ChatLiteLLM": "https://python.langchain.com/v0.2/docs/integrations/chat/litellm/"}, "LlamaEdgeChatService": {"LlamaEdge": "https://python.langchain.com/v0.2/docs/integrations/chat/llama_edge/"}, "OllamaFunctions": {"OllamaFunctions": "https://python.langchain.com/v0.2/docs/integrations/chat/ollama_functions/"}, "VolcEngineMaasChat": {"VolcEngineMaasChat": "https://python.langchain.com/v0.2/docs/integrations/chat/volcengine_maas/"}, "ChatLlamaAPI": {"ChatLlamaAPI": "https://python.langchain.com/v0.2/docs/integrations/chat/llama_api/"}, "create_tagging_chain": {"ChatLlamaAPI": "https://python.langchain.com/v0.2/docs/integrations/chat/llama_api/"}, "ChatKonko": {"ChatKonko": "https://python.langchain.com/v0.2/docs/integrations/chat/konko/"}, "ChatBedrockConverse": {"ChatBedrock": 
"https://python.langchain.com/v0.2/docs/integrations/chat/bedrock/"}, "MLXPipeline": {"MLX": "https://python.langchain.com/v0.2/docs/integrations/providers/mlx/", "MLX Local Pipelines": "https://python.langchain.com/v0.2/docs/integrations/llms/mlx_pipelines/"}, "ChatMLX": {"MLX": "https://python.langchain.com/v0.2/docs/integrations/providers/mlx/"}, "format_log_to_str": {"MLX": "https://python.langchain.com/v0.2/docs/integrations/chat/mlx/"}, "GigaChat": {"GigaChat": "https://python.langchain.com/v0.2/docs/integrations/llms/gigachat/", "Salute Devices": "https://python.langchain.com/v0.2/docs/integrations/providers/salute_devices/"}, "JinaChat": {"JinaChat": "https://python.langchain.com/v0.2/docs/integrations/chat/jinachat/"}, "SystemMessagePromptTemplate": {"JinaChat": "https://python.langchain.com/v0.2/docs/integrations/chat/jinachat/", "vLLM Chat": "https://python.langchain.com/v0.2/docs/integrations/chat/vllm/", "Yellowbrick": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/yellowbrick/", "Figma": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/figma/"}, "ChatOCIGenAI": {"ChatOCIGenAI": "https://python.langchain.com/v0.2/docs/integrations/chat/oci_generative_ai/", "Oracle Cloud Infrastructure (OCI)": "https://python.langchain.com/v0.2/docs/integrations/providers/oci/"}, "ChatLlamaCpp": {"Llama.cpp": "https://python.langchain.com/v0.2/docs/integrations/providers/llamacpp/"}, "convert_to_openai_tool": {"Llama.cpp": "https://python.langchain.com/v0.2/docs/integrations/chat/llamacpp/"}, "ChatEverlyAI": {"ChatEverlyAI": "https://python.langchain.com/v0.2/docs/integrations/chat/everlyai/"}, "GPTRouter": {"GPTRouter": "https://python.langchain.com/v0.2/docs/integrations/chat/gpt_router/"}, "GPTRouterModel": {"GPTRouter": "https://python.langchain.com/v0.2/docs/integrations/chat/gpt_router/"}, "ChatLiteLLMRouter": {"ChatLiteLLMRouter": "https://python.langchain.com/v0.2/docs/integrations/chat/litellm_router/"}, "ChatFriendli": {"ChatFriendli": "https://python.langchain.com/v0.2/docs/integrations/chat/friendli/"}, "ChatZhipuAI": {"ZHIPU AI": "https://python.langchain.com/v0.2/docs/integrations/chat/zhipuai/", "Zhipu AI": "https://python.langchain.com/v0.2/docs/integrations/providers/zhipuai/"}, "create_json_chat_agent": {"ZHIPU AI": "https://python.langchain.com/v0.2/docs/integrations/chat/zhipuai/"}, "ChatBaichuan": {"Chat with Baichuan-192K": "https://python.langchain.com/v0.2/docs/integrations/chat/baichuan/", "Baichuan": "https://python.langchain.com/v0.2/docs/integrations/providers/baichuan/"}, "ChatTogether": {"ChatTogether": "https://python.langchain.com/v0.2/docs/integrations/chat/together/", "Together AI": "https://python.langchain.com/v0.2/docs/integrations/llms/together/"}, "Llama2Chat": {"Llama2Chat": "https://python.langchain.com/v0.2/docs/integrations/chat/llama2_chat/"}, "HuggingFaceTextGenInference": {"Llama2Chat": "https://python.langchain.com/v0.2/docs/integrations/chat/llama2_chat/"}, "QianfanChatEndpoint": {"QianfanChatEndpoint": "https://python.langchain.com/v0.2/docs/integrations/chat/baidu_qianfan_endpoint/", "ErnieBotChat": "https://python.langchain.com/v0.2/docs/integrations/chat/ernie/", "Baidu": "https://python.langchain.com/v0.2/docs/integrations/providers/baidu/"}, "ChatEdenAI": {"Eden AI": "https://python.langchain.com/v0.2/docs/integrations/providers/edenai/"}, "ErnieBotChat": {"ErnieBotChat": "https://python.langchain.com/v0.2/docs/integrations/chat/ernie/"}, "ChatHunyuan": {"Tencent Hunyuan": 
"https://python.langchain.com/v0.2/docs/integrations/chat/tencent_hunyuan/", "Tencent": "https://python.langchain.com/v0.2/docs/integrations/providers/tencent/"}, "MiniMaxChat": {"MiniMaxChat": "https://python.langchain.com/v0.2/docs/integrations/chat/minimax/", "Minimax": "https://python.langchain.com/v0.2/docs/integrations/providers/minimax/"}, "ChatYuan2": {"Yuan2.0": "https://python.langchain.com/v0.2/docs/integrations/chat/yuan2/", "IEIT Systems": "https://python.langchain.com/v0.2/docs/integrations/providers/ieit_systems/"}, "ChatTongyi": {"ChatTongyi": "https://python.langchain.com/v0.2/docs/integrations/chat/tongyi/", "Alibaba Cloud": "https://python.langchain.com/v0.2/docs/integrations/providers/alibaba_cloud/"}, "PromptLayerChatOpenAI": {"PromptLayerChatOpenAI": "https://python.langchain.com/v0.2/docs/integrations/chat/promptlayer_chatopenai/", "PromptLayer": "https://python.langchain.com/v0.2/docs/integrations/providers/promptlayer/"}, "ChatSparkLLM": {"SparkLLM Chat": "https://python.langchain.com/v0.2/docs/integrations/chat/sparkllm/", "iFlytek": "https://python.langchain.com/v0.2/docs/integrations/providers/iflytek/"}, "MoonshotChat": {"MoonshotChat": "https://python.langchain.com/v0.2/docs/integrations/chat/moonshot/"}, "ChatDappierAI": {"Dappier AI": "https://python.langchain.com/v0.2/docs/integrations/chat/dappier/"}, "ChatMaritalk": {"Maritalk": "https://python.langchain.com/v0.2/docs/integrations/chat/maritalk/", "MariTalk": "https://python.langchain.com/v0.2/docs/integrations/providers/maritalk/"}, "OnlinePDFLoader": {"Maritalk": "https://python.langchain.com/v0.2/docs/integrations/chat/maritalk/", "UnstructuredPDFLoader": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/unstructured_pdfloader/"}, "load_qa_chain": {"Maritalk": "https://python.langchain.com/v0.2/docs/integrations/chat/maritalk/", "Amazon Textract ": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/amazon_textract/", "SageMakerEndpoint": "https://python.langchain.com/v0.2/docs/integrations/llms/sagemaker/"}, "ChatPremAI": {"ChatPremAI": "https://python.langchain.com/v0.2/docs/integrations/chat/premai/", "PremAI": "https://python.langchain.com/v0.2/docs/integrations/providers/premai/"}, "ChatAnyscale": {"ChatAnyscale": "https://python.langchain.com/v0.2/docs/integrations/chat/anyscale/", "Anyscale": "https://python.langchain.com/v0.2/docs/integrations/providers/anyscale/"}, "ChatYandexGPT": {"ChatYandexGPT": "https://python.langchain.com/v0.2/docs/integrations/chat/yandex/", "Yandex": "https://python.langchain.com/v0.2/docs/integrations/providers/yandex/"}, "ChatPerplexity": {"ChatPerplexity": "https://python.langchain.com/v0.2/docs/integrations/chat/perplexity/", "Perplexity": "https://python.langchain.com/v0.2/docs/integrations/providers/perplexity/"}, "ChatAnthropicTools": {"[Deprecated] Experimental Anthropic Tools Wrapper": "https://python.langchain.com/v0.2/docs/integrations/chat/anthropic_functions/"}, "DeepEvalCallbackHandler": {"Confident": "https://python.langchain.com/v0.2/docs/integrations/callbacks/confident/", "Confident AI": "https://python.langchain.com/v0.2/docs/integrations/providers/confident/"}, "LLMonitorCallbackHandler": {"LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/providers/llmonitor/"}, "identify": {"LLMonitor": "https://python.langchain.com/v0.2/docs/integrations/callbacks/llmonitor/"}, "ContextCallbackHandler": {"Context": "https://python.langchain.com/v0.2/docs/integrations/providers/context/"}, 
"FiddlerCallbackHandler": {"Fiddler": "https://python.langchain.com/v0.2/docs/integrations/providers/fiddler/"}, "LabelStudioCallbackHandler": {"Label Studio": "https://python.langchain.com/v0.2/docs/integrations/providers/labelstudio/"}, "CometTracer": {"Comet Tracing": "https://python.langchain.com/v0.2/docs/integrations/callbacks/comet_tracing/", "Comet": "https://python.langchain.com/v0.2/docs/integrations/providers/comet_tracking/"}, "ArgillaCallbackHandler": {"Argilla": "https://python.langchain.com/v0.2/docs/integrations/providers/argilla/"}, "StdOutCallbackHandler": {"Argilla": "https://python.langchain.com/v0.2/docs/integrations/callbacks/argilla/", "Comet": "https://python.langchain.com/v0.2/docs/integrations/providers/comet_tracking/", "Aim": "https://python.langchain.com/v0.2/docs/integrations/providers/aim_tracking/", "Weights & Biases": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracking/", "ClearML": "https://python.langchain.com/v0.2/docs/integrations/providers/clearml_tracking/", "OpaquePrompts": "https://python.langchain.com/v0.2/docs/integrations/llms/opaqueprompts/"}, "PromptLayerCallbackHandler": {"PromptLayer": "https://python.langchain.com/v0.2/docs/integrations/providers/promptlayer/"}, "StreamlitCallbackHandler": {"Streamlit": "https://python.langchain.com/v0.2/docs/integrations/providers/streamlit/", "GPT4All": "https://python.langchain.com/v0.2/docs/integrations/providers/gpt4all/"}, "UpTrainCallbackHandler": {"UpTrain": "https://python.langchain.com/v0.2/docs/integrations/providers/uptrain/"}, "TrubricsCallbackHandler": {"Trubrics": "https://python.langchain.com/v0.2/docs/integrations/providers/trubrics/"}, "InfinoCallbackHandler": {"Infino": "https://python.langchain.com/v0.2/docs/integrations/providers/infino/"}, "UpstashRatelimitError": {"Upstash Ratelimit Callback": "https://python.langchain.com/v0.2/docs/integrations/callbacks/upstash_ratelimit/"}, "UpstashRatelimitHandler": {"Upstash Ratelimit Callback": "https://python.langchain.com/v0.2/docs/integrations/callbacks/upstash_ratelimit/"}, "FigmaFileLoader": {"Figma": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/figma/"}, "Baseten": {"Baseten": "https://python.langchain.com/v0.2/docs/integrations/llms/baseten/"}, "WeatherDataLoader": {"Weather": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/weather/"}, "Tair": {"Tair": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tair/", "Alibaba Cloud": "https://python.langchain.com/v0.2/docs/integrations/providers/alibaba_cloud/"}, "CollegeConfidentialLoader": {"College Confidential": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/college_confidential/"}, "RWKV": {"RWKV-4": "https://python.langchain.com/v0.2/docs/integrations/providers/rwkv/"}, "LakeFSLoader": {"lakeFS": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/lakefs/"}, "FaunaLoader": {"Fauna": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/fauna/"}, "OCIGenAI": {"Oracle Cloud Infrastructure (OCI)": "https://python.langchain.com/v0.2/docs/integrations/providers/oci/", "# Oracle Cloud Infrastructure Generative AI": "https://python.langchain.com/v0.2/docs/integrations/llms/oci_generative_ai/"}, "OCIModelDeploymentVLLM": {"Oracle Cloud Infrastructure (OCI)": "https://python.langchain.com/v0.2/docs/integrations/providers/oci/", "OCI Data Science Model Deployment Endpoint": 
"https://python.langchain.com/v0.2/docs/integrations/llms/oci_model_deployment_endpoint/"}, "OCIModelDeploymentTGI": {"Oracle Cloud Infrastructure (OCI)": "https://python.langchain.com/v0.2/docs/integrations/providers/oci/", "OCI Data Science Model Deployment Endpoint": "https://python.langchain.com/v0.2/docs/integrations/llms/oci_model_deployment_endpoint/"}, "OracleDocLoader": {"OracleAI Vector Search": "https://python.langchain.com/v0.2/docs/integrations/providers/oracleai/", "Oracle AI Vector Search: Document Processing": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/oracleai/"}, "OracleTextSplitter": {"OracleAI Vector Search": "https://python.langchain.com/v0.2/docs/integrations/providers/oracleai/", "Oracle AI Vector Search: Document Processing": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/oracleai/"}, "OracleVS": {"OracleAI Vector Search": "https://python.langchain.com/v0.2/docs/integrations/providers/oracleai/", "Oracle AI Vector Search: Vector Store": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/oracle/"}, "Lantern": {"Lantern": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/lantern/"}, "DropboxLoader": {"Dropbox": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/dropbox/"}, "ForefrontAI": {"ForefrontAI": "https://python.langchain.com/v0.2/docs/integrations/llms/forefrontai/"}, "CometCallbackHandler": {"Comet": "https://python.langchain.com/v0.2/docs/integrations/providers/comet_tracking/"}, "CTransformers": {"C Transformers": "https://python.langchain.com/v0.2/docs/integrations/llms/ctransformers/"}, "BiliBiliLoader": {"BiliBili": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/bilibili/"}, "TencentCOSDirectoryLoader": {"Tencent": "https://python.langchain.com/v0.2/docs/integrations/providers/tencent/", "Tencent COS Directory": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/tencent_cos_directory/"}, "TencentCOSFileLoader": {"Tencent": "https://python.langchain.com/v0.2/docs/integrations/providers/tencent/", "Tencent COS File": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/tencent_cos_file/"}, "OBSDirectoryLoader": {"Huawei": "https://python.langchain.com/v0.2/docs/integrations/providers/huawei/", "Huawei OBS Directory": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/huawei_obs_directory/"}, "OBSFileLoader": {"Huawei": "https://python.langchain.com/v0.2/docs/integrations/providers/huawei/", "Huawei OBS File": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/huawei_obs_file/"}, "DiffbotLoader": {"Diffbot": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/diffbot/"}, "DiffbotGraphTransformer": {"Diffbot": "https://python.langchain.com/v0.2/docs/integrations/graphs/diffbot/", "Neo4j": "https://python.langchain.com/v0.2/docs/integrations/providers/neo4j/"}, "DeepSparse": {"DeepSparse": "https://python.langchain.com/v0.2/docs/integrations/llms/deepsparse/"}, "AimCallbackHandler": {"Aim": "https://python.langchain.com/v0.2/docs/integrations/providers/aim_tracking/"}, "ModernTreasuryLoader": {"Modern Treasury": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/modern_treasury/"}, "GitHubIssuesLoader": {"GitHub": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/github/"}, "GithubFileLoader": {"GitHub": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/github/"}, "Banana": {"Banana": 
"https://python.langchain.com/v0.2/docs/integrations/llms/banana/"}, "InfinispanVS": {"Infinispan VS": "https://python.langchain.com/v0.2/docs/integrations/providers/infinispanvs/", "Infinispan": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/infinispanvs/"}, "CerebriumAI": {"CerebriumAI": "https://python.langchain.com/v0.2/docs/integrations/llms/cerebriumai/"}, "GutenbergLoader": {"Gutenberg": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/gutenberg/"}, "WikipediaLoader": {"Wikipedia": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/wikipedia/", "Diffbot": "https://python.langchain.com/v0.2/docs/integrations/graphs/diffbot/"}, "ConfluenceLoader": {"Confluence": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/confluence/"}, "Predibase": {"Predibase": "https://python.langchain.com/v0.2/docs/integrations/llms/predibase/"}, "Beam": {"Beam": "https://python.langchain.com/v0.2/docs/integrations/llms/beam/"}, "GrobidParser": {"Grobid": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/grobid/"}, "Typesense": {"Typesense": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/typesense/"}, "Hologres": {"Hologres": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/hologres/", "Alibaba Cloud": "https://python.langchain.com/v0.2/docs/integrations/providers/alibaba_cloud/"}, "ArangoGraph": {"ArangoDB": "https://python.langchain.com/v0.2/docs/integrations/graphs/arangodb/"}, "ArangoGraphQAChain": {"ArangoDB": "https://python.langchain.com/v0.2/docs/integrations/graphs/arangodb/"}, "ArcGISLoader": {"ArcGIS": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/arcgis/"}, "WandbCallbackHandler": {"Weights & Biases": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracking/"}, "ObsidianLoader": {"Obsidian": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/obsidian/"}, "BrowserbaseLoader": {"Browserbase": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/browserbase/"}, "OctoAIEndpoint": {"OctoAI": "https://python.langchain.com/v0.2/docs/integrations/llms/octoai/"}, "OctoAIEmbeddings": {"OctoAI": "https://python.langchain.com/v0.2/docs/integrations/providers/octoai/"}, "Nebula": {"Nebula": "https://python.langchain.com/v0.2/docs/integrations/providers/symblai_nebula/", "Nebula (Symbl.ai)": "https://python.langchain.com/v0.2/docs/integrations/llms/symblai_nebula/"}, "Writer": {"Writer": "https://python.langchain.com/v0.2/docs/integrations/llms/writer/"}, "BaichuanLLM": {"Baichuan": "https://python.langchain.com/v0.2/docs/integrations/providers/baichuan/", "Baichuan LLM": "https://python.langchain.com/v0.2/docs/integrations/llms/baichuan/"}, "ApacheDoris": {"Apache Doris": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/apache_doris/"}, "ZepCloudVectorStore": {"Zep": "https://python.langchain.com/v0.2/docs/integrations/providers/zep/", "Zep Cloud": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/zep_cloud/"}, "BrowserlessLoader": {"Browserless": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/browserless/"}, "AZLyricsLoader": {"AZLyrics": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/azlyrics/"}, "ToMarkdownLoader": {"2Markdown": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/tomarkdown/"}, "SparkLLM": {"iFlytek": "https://python.langchain.com/v0.2/docs/integrations/providers/iflytek/", 
"SparkLLM": "https://python.langchain.com/v0.2/docs/integrations/llms/sparkllm/"}, "Mlflow": {"MLflow Deployments for LLMs": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow/"}, "MlflowEmbeddings": {"MLflow Deployments for LLMs": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow/"}, "ChatMlflow": {"MLflow Deployments for LLMs": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow/"}, "GitLoader": {"Git": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/git/"}, "MlflowAIGateway": {"MLflow AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_ai_gateway/"}, "MlflowAIGatewayEmbeddings": {"MLflow AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_ai_gateway/"}, "ChatMLflowAIGateway": {"MLflow AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_ai_gateway/"}, "Tigris": {"Tigris": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tigris/"}, "Meilisearch": {"Meilisearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/meilisearch/"}, "SQLDatabaseChain": {"Rebuff": "https://python.langchain.com/v0.2/docs/integrations/providers/rebuff/"}, "SnowflakeLoader": {"Snowflake": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/snowflake/"}, "CubeSemanticLoader": {"Cube": "https://python.langchain.com/v0.2/docs/integrations/providers/cube/", "Cube Semantic Layer": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/cube_semantic/"}, "Clickhouse": {"ClickHouse": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/clickhouse/"}, "ClickhouseSettings": {"ClickHouse": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/clickhouse/"}, "TelegramChatFileLoader": {"Telegram": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/telegram/"}, "TelegramChatApiLoader": {"Telegram": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/telegram/"}, "PredictionGuard": {"Prediction Guard": "https://python.langchain.com/v0.2/docs/integrations/llms/predictionguard/"}, "Together": {"Together AI": "https://python.langchain.com/v0.2/docs/integrations/llms/together/"}, "NotionDirectoryLoader": {"Notion DB": "https://python.langchain.com/v0.2/docs/integrations/providers/notion/", "Notion DB 2/2": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/notion/"}, "NotionDBLoader": {"Notion DB": "https://python.langchain.com/v0.2/docs/integrations/providers/notion/", "Notion DB 2/2": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/notiondb/"}, "MWDumpLoader": {"MediaWikiDump": "https://python.langchain.com/v0.2/docs/integrations/providers/mediawikidump/", "MediaWiki Dump": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/mediawikidump/"}, "BraveSearchLoader": {"Brave Search": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/brave_search/"}, "StarRocks": {"StarRocks": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/starrocks/"}, "GooseAI": {"GooseAI": "https://python.langchain.com/v0.2/docs/integrations/llms/gooseai/"}, "DatadogLogsLoader": {"Datadog Logs": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/datadog_logs/"}, "ApifyWrapper": {"Apify": "https://python.langchain.com/v0.2/docs/integrations/providers/apify/", "Apify Dataset": 
"https://python.langchain.com/v0.2/docs/integrations/document_loaders/apify_dataset/"}, "ApifyDatasetLoader": {"Apify": "https://python.langchain.com/v0.2/docs/integrations/providers/apify/", "Apify Dataset": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/apify_dataset/"}, "NLPCloud": {"NLPCloud": "https://python.langchain.com/v0.2/docs/integrations/providers/nlpcloud/", "NLP Cloud": "https://python.langchain.com/v0.2/docs/integrations/llms/nlpcloud/"}, "Milvus": {"Milvus": "https://python.langchain.com/v0.2/docs/integrations/providers/milvus/", "Zilliz": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/zilliz/"}, "SemaDB": {"SemaDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/semadb/"}, "GitbookLoader": {"GitBook": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/gitbook/"}, "Rockset": {"Rockset": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/rockset/"}, "RocksetLoader": {"Rockset": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/rockset/"}, "Minimax": {"Minimax": "https://python.langchain.com/v0.2/docs/integrations/llms/minimax/"}, "UnstructuredCHMLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/"}, "UnstructuredCSVLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "CSV": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/csv/"}, "UnstructuredEmailLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "Email": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/email/"}, "UnstructuredEPubLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "EPub ": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/epub/"}, "UnstructuredFileIOLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "Google Drive": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/google_drive/"}, "UnstructuredImageLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "Images": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/image/"}, "UnstructuredODTLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "Open Document Format (ODT)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/odt/"}, "UnstructuredOrgModeLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "Org-mode": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/org_mode/"}, "UnstructuredPDFLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "UnstructuredPDFLoader": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/unstructured_pdfloader/"}, "UnstructuredRSTLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "RST": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/rst/"}, "UnstructuredRTFLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/"}, "UnstructuredTSVLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "TSV": 
"https://python.langchain.com/v0.2/docs/integrations/document_loaders/tsv/"}, "UnstructuredURLLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "URL": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/url/"}, "UnstructuredXMLLoader": {"Unstructured": "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/", "UnstructuredXMLLoader": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/xml/"}, "SelfHostedPipeline": {"Runhouse": "https://python.langchain.com/v0.2/docs/integrations/llms/runhouse/"}, "SelfHostedHuggingFaceLLM": {"Runhouse": "https://python.langchain.com/v0.2/docs/integrations/llms/runhouse/"}, "MlflowCallbackHandler": {"MLflow": "https://python.langchain.com/v0.2/docs/integrations/providers/mlflow_tracking/"}, "SpreedlyLoader": {"Spreedly": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/spreedly/"}, "OpenLLM": {"OpenLLM": "https://python.langchain.com/v0.2/docs/integrations/llms/openllm/"}, "PubMedLoader": {"PubMed": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/pubmed/"}, "SearxSearchResults": {"SearxNG Search API": "https://python.langchain.com/v0.2/docs/integrations/providers/searx/"}, "Modal": {"Modal": "https://python.langchain.com/v0.2/docs/integrations/llms/modal/"}, "OpenCityDataLoader": {"Geopandas": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/geopandas/", "Open City Data": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/open_city_data/"}, "PGEmbedding": {"Postgres Embedding": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pgembedding/"}, "SQLiteVSS": {"SQLite": "https://python.langchain.com/v0.2/docs/integrations/providers/sqlite/", "SQLite-VSS": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sqlitevss/"}, "Xinference": {"Xorbits Inference (Xinference)": "https://python.langchain.com/v0.2/docs/integrations/llms/xinference/"}, "IFixitLoader": {"iFixit": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/ifixit/"}, "AlephAlpha": {"Aleph Alpha": "https://python.langchain.com/v0.2/docs/integrations/llms/aleph_alpha/"}, "PipelineAI": {"PipelineAI": "https://python.langchain.com/v0.2/docs/integrations/llms/pipelineai/"}, "FacebookChatLoader": {"Facebook - Meta": "https://python.langchain.com/v0.2/docs/integrations/providers/facebook/", "Facebook Chat": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/facebook_chat/"}, "Epsilla": {"Epsilla": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/epsilla/"}, "AwaDB": {"AwaDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/awadb/"}, "ArxivLoader": {"Arxiv": "https://python.langchain.com/v0.2/docs/integrations/providers/arxiv/", "ArxivLoader": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/arxiv/"}, "BlockchainDocumentLoader": {"Alchemy": "https://python.langchain.com/v0.2/docs/integrations/providers/alchemy/", "Blockchain": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/blockchain/"}, "BlockchainType": {"Alchemy": "https://python.langchain.com/v0.2/docs/integrations/providers/alchemy/", "Blockchain": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/blockchain/"}, "Anyscale": {"Anyscale": "https://python.langchain.com/v0.2/docs/integrations/llms/anyscale/"}, "StripeLoader": {"Stripe": 
"https://python.langchain.com/v0.2/docs/integrations/document_loaders/stripe/"}, "StochasticAI": {"StochasticAI": "https://python.langchain.com/v0.2/docs/integrations/llms/stochasticai/"}, "Bagel": {"BagelDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/bageldb/", "Bagel": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/bagel/"}, "TigerGraph": {"TigerGraph": "https://python.langchain.com/v0.2/docs/integrations/graphs/tigergraph/"}, "BlackboardLoader": {"Blackboard": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/blackboard/"}, "YandexGPT": {"Yandex": "https://python.langchain.com/v0.2/docs/integrations/providers/yandex/", "YandexGPT": "https://python.langchain.com/v0.2/docs/integrations/llms/yandex/"}, "UpstashVectorStore": {"Upstash Vector": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/upstash/"}, "NucliaTextTransformer": {"Nuclia": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/nuclia_transformer/"}, "NucliaLoader": {"Nuclia": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/nuclia/"}, "NucliaDB": {"Nuclia": "https://python.langchain.com/v0.2/docs/integrations/providers/nuclia/", "NucliaDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/nucliadb/"}, "AnalyticDB": {"AnalyticDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/analyticdb/", "Alibaba Cloud": "https://python.langchain.com/v0.2/docs/integrations/providers/alibaba_cloud/"}, "GoogleApiYoutubeLoader": {"YouTube": "https://python.langchain.com/v0.2/docs/integrations/providers/youtube/", "YouTube transcripts": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/youtube_transcript/"}, "PromptLayerOpenAI": {"PromptLayer": "https://python.langchain.com/v0.2/docs/integrations/providers/promptlayer/", "PromptLayer OpenAI": "https://python.langchain.com/v0.2/docs/integrations/llms/promptlayer_openai/"}, "USearch": {"USearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/usearch/"}, "EtherscanLoader": {"Etherscan": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/etherscan/"}, "Arcee": {"Arcee": "https://python.langchain.com/v0.2/docs/integrations/llms/arcee/"}, "WhyLabsCallbackHandler": {"WhyLabs": "https://python.langchain.com/v0.2/docs/integrations/providers/whylabs_profiling/"}, "YiLLM": {"01.AI": "https://python.langchain.com/v0.2/docs/integrations/providers/yi/", "Yi": "https://python.langchain.com/v0.2/docs/integrations/llms/yi/"}, "IuguLoader": {"Iugu": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/iugu/"}, "CouchbaseLoader": {"Couchbase": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/couchbase/"}, "FlyteCallbackHandler": {"Flyte": "https://python.langchain.com/v0.2/docs/integrations/providers/flyte/"}, "wandb_tracing_enabled": {"WandB Tracing": "https://python.langchain.com/v0.2/docs/integrations/providers/wandb_tracing/"}, "ManifestWrapper": {"Hazy Research": "https://python.langchain.com/v0.2/docs/integrations/providers/hazy_research/", "Manifest": "https://python.langchain.com/v0.2/docs/integrations/llms/manifest/"}, "OntotextGraphDBGraph": {"Ontotext GraphDB": "https://python.langchain.com/v0.2/docs/integrations/graphs/ontotext/"}, "OntotextGraphDBQAChain": {"Ontotext GraphDB": "https://python.langchain.com/v0.2/docs/integrations/graphs/ontotext/"}, "Marqo": {"Marqo": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/marqo/"}, 
"IMSDbLoader": {"IMSDb": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/imsdb/"}, "TiDBLoader": {"TiDB": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/tidb/"}, "TiDBVectorStore": {"TiDB": "https://python.langchain.com/v0.2/docs/integrations/providers/tidb/", "TiDB Vector": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tidb_vector/"}, "DeepInfra": {"DeepInfra": "https://python.langchain.com/v0.2/docs/integrations/llms/deepinfra/"}, "RedditPostsLoader": {"Reddit": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/reddit/"}, "TrelloLoader": {"Trello": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/trello/"}, "AtlasDB": {"Atlas": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/atlas/"}, "SKLearnVectorStore": {"scikit-learn": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sklearn/"}, "EverNoteLoader": {"EverNote": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/evernote/"}, "VDMS": {"VDMS": "https://python.langchain.com/v0.2/docs/integrations/providers/vdms/", "Intel's Visual Data Management System (VDMS)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vdms/"}, "VDMS_Client": {"VDMS": "https://python.langchain.com/v0.2/docs/integrations/providers/vdms/", "Intel's Visual Data Management System (VDMS)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vdms/"}, "TwitterTweetLoader": {"Twitter": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/twitter/"}, "DiscordChatLoader": {"Discord": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/discord/"}, "AssemblyAIAudioTranscriptLoader": {"AssemblyAI": "https://python.langchain.com/v0.2/docs/integrations/providers/assemblyai/", "AssemblyAI Audio Transcripts": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/assemblyai/"}, "KineticaLoader": {"Kinetica": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/kinetica/"}, "ClearMLCallbackHandler": {"ClearML": "https://python.langchain.com/v0.2/docs/integrations/providers/clearml_tracking/"}, "CohereRagRetriever": {"Cohere": "https://python.langchain.com/v0.2/docs/integrations/providers/cohere/"}, "SlackDirectoryLoader": {"Slack": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/slack/"}, "OllamaEmbeddings": {"Ollama": "https://python.langchain.com/v0.2/docs/integrations/providers/ollama/", "ApertureDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/aperturedb/"}, "HNLoader": {"Hacker News": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/hacker_news/", "Google Spanner": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/google_spanner/"}, "CTranslate2": {"CTranslate2": "https://python.langchain.com/v0.2/docs/integrations/llms/ctranslate2/"}, "QianfanLLMEndpoint": {"Baidu": "https://python.langchain.com/v0.2/docs/integrations/providers/baidu/", "Baidu Qianfan": "https://python.langchain.com/v0.2/docs/integrations/llms/baidu_qianfan_endpoint/"}, "BESVectorStore": {"Baidu": "https://python.langchain.com/v0.2/docs/integrations/providers/baidu/", "Baidu Cloud ElasticSearch VectorSearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/baiducloud_vector_search/"}, "BaiduVectorDB": {"Baidu": "https://python.langchain.com/v0.2/docs/integrations/providers/baidu/", "Baidu VectorDB": 
"https://python.langchain.com/v0.2/docs/integrations/vectorstores/baiduvectordb/"}, "Aphrodite": {"PygmalionAI": "https://python.langchain.com/v0.2/docs/integrations/providers/pygmalionai/", "Aphrodite Engine": "https://python.langchain.com/v0.2/docs/integrations/llms/aphrodite/"}, "PaiEasEndpoint": {"Alibaba Cloud": "https://python.langchain.com/v0.2/docs/integrations/providers/alibaba_cloud/", "Alibaba Cloud PAI EAS": "https://python.langchain.com/v0.2/docs/integrations/llms/alibabacloud_pai_eas_endpoint/"}, "MaxComputeLoader": {"Alibaba Cloud": "https://python.langchain.com/v0.2/docs/integrations/providers/alibaba_cloud/", "Alibaba Cloud MaxCompute": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/alibaba_cloud_maxcompute/"}, "AlibabaCloudOpenSearch": {"Alibaba Cloud": "https://python.langchain.com/v0.2/docs/integrations/providers/alibaba_cloud/", "Alibaba Cloud OpenSearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/alibabacloud_opensearch/"}, "AlibabaCloudOpenSearchSettings": {"Alibaba Cloud": "https://python.langchain.com/v0.2/docs/integrations/providers/alibaba_cloud/", "Alibaba Cloud OpenSearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/alibabacloud_opensearch/"}, "DocusaurusLoader": {"Docusaurus": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/docusaurus/"}, "Annoy": {"Annoy": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/annoy/"}, "BibtexLoader": {"BibTeX": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/bibtex/"}, "Yuan2": {"IEIT Systems": "https://python.langchain.com/v0.2/docs/integrations/providers/ieit_systems/", "Yuan2.0": "https://python.langchain.com/v0.2/docs/integrations/llms/yuan2/"}, "CassandraLoader": {"Cassandra": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/cassandra/"}, "Vearch": {"Vearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vearch/"}, "JoplinLoader": {"Joplin": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/joplin/"}, "ArthurCallbackHandler": {"Arthur": "https://python.langchain.com/v0.2/docs/integrations/providers/arthur_tracking/"}, "AcreomLoader": {"Acreom": "https://python.langchain.com/v0.2/docs/integrations/providers/acreom/", "acreom": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/acreom/"}, "KDBAI": {"KDB.AI": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/kdbai/"}, "DuckDBLoader": {"DuckDB": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/duckdb/"}, "Petals": {"Petals": "https://python.langchain.com/v0.2/docs/integrations/llms/petals/"}, "MomentoVectorIndex": {"Momento": "https://python.langchain.com/v0.2/docs/integrations/providers/momento/", "Momento Vector Index (MVI)": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/momento_vector_index/"}, "NIBittensorLLM": {"Bittensor": "https://python.langchain.com/v0.2/docs/integrations/llms/bittensor/"}, "AirtableLoader": {"Airtable": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airtable/"}, "LarkSuiteDocLoader": {"ByteDance": "https://python.langchain.com/v0.2/docs/integrations/providers/byte_dance/", "LarkSuite (FeiShu)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/larksuite/"}, "JavelinAIGateway": {"Javelin AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/javelin_ai_gateway/", "Javelin AI Gateway Tutorial": 
"https://python.langchain.com/v0.2/docs/integrations/llms/javelin/"}, "JavelinAIGatewayEmbeddings": {"Javelin AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/javelin_ai_gateway/", "Javelin AI Gateway Tutorial": "https://python.langchain.com/v0.2/docs/integrations/llms/javelin/"}, "ChatJavelinAIGateway": {"Javelin AI Gateway": "https://python.langchain.com/v0.2/docs/integrations/providers/javelin_ai_gateway/", "Javelin AI Gateway Tutorial": "https://python.langchain.com/v0.2/docs/integrations/llms/javelin/"}, "TensorflowDatasetLoader": {"TensorFlow Datasets": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/tensorflow_datasets/"}, "Clarifai": {"Clarifai": "https://python.langchain.com/v0.2/docs/integrations/llms/clarifai/"}, "DataheraldTextToSQL": {"Dataherald": "https://python.langchain.com/v0.2/docs/integrations/providers/dataherald/"}, "RoamLoader": {"Roam": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/roam/"}, "RerankConfig": {"Vectara Chat": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_chat/", "Vectara": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vectara/"}, "SummaryConfig": {"Vectara Chat": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_chat/", "Vectara": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vectara/"}, "VectaraQueryConfig": {"Vectara Chat": "https://python.langchain.com/v0.2/docs/integrations/providers/vectara/vectara_chat/", "Vectara": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vectara/"}, "PebbloRetrievalQA": {"Identity-enabled RAG using PebbloRetrievalQA": "https://python.langchain.com/v0.2/docs/integrations/providers/pebblo/pebblo_retrieval_qa/"}, "AuthContext": {"Identity-enabled RAG using PebbloRetrievalQA": "https://python.langchain.com/v0.2/docs/integrations/providers/pebblo/pebblo_retrieval_qa/"}, "ChainInput": {"Identity-enabled RAG using PebbloRetrievalQA": "https://python.langchain.com/v0.2/docs/integrations/providers/pebblo/pebblo_retrieval_qa/"}, "SemanticContext": {"Identity-enabled RAG using PebbloRetrievalQA": "https://python.langchain.com/v0.2/docs/integrations/providers/pebblo/pebblo_retrieval_qa/"}, "RedisStore": {"RedisStore": "https://python.langchain.com/v0.2/docs/integrations/stores/redis/"}, "CassandraByteStore": {"CassandraByteStore": "https://python.langchain.com/v0.2/docs/integrations/stores/cassandra/"}, "UpstashRedisByteStore": {"UpstashRedisByteStore": "https://python.langchain.com/v0.2/docs/integrations/stores/upstash_redis/"}, "ApacheDorisSettings": {"Apache Doris": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/apache_doris/"}, "DistanceStrategy": {"Kinetica Vectorstore API": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/kinetica/", "SAP HANA Cloud Vector Engine": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sap_hanavector/", "SingleStoreDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/singlestoredb/", "Oracle AI Vector Search: Vector Store": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/oracle/", "SemaDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/semadb/"}, "SentenceTransformerEmbeddings": {"SQLite-VSS": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/sqlitevss/", "Vespa": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vespa/"}, "Vald": {"Vald": 
"https://python.langchain.com/v0.2/docs/integrations/vectorstores/vald/"}, "RetrievalQAWithSourcesChain": {"Weaviate": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/weaviate/", "Yellowbrick": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/yellowbrick/", "Jaguar Vector Database": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/jaguar/", "Neo4j Vector Index": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/neo4jvector/", "Marqo": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/marqo/", "Psychic": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/psychic/"}, "Yellowbrick": {"Yellowbrick": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/yellowbrick/"}, "LLMRails": {"LLMRails": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/llm_rails/"}, "ChatGooglePalm": {"ScaNN": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/scann/"}, "Hippo": {"Hippo": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/hippo/"}, "RedisText": {"Redis": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/redis/"}, "RedisNum": {"Redis": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/redis/"}, "RedisTag": {"Redis": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/redis/"}, "RedisFilter": {"Redis": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/redis/"}, "VespaStore": {"Vespa": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vespa/"}, "NeuralDBVectorStore": {"ThirdAI NeuralDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/thirdai_neuraldb/"}, "VikingDB": {"viking DB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vikingdb/"}, "VikingDBConfig": {"viking DB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vikingdb/"}, "ApertureDB": {"ApertureDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/aperturedb/"}, "Relyt": {"Relyt": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/relyt/"}, "oraclevs": {"Oracle AI Vector Search: Vector Store": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/oracle/"}, "VLite": {"vlite": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/vlite/"}, "AzureCosmosDBNoSqlVectorSearch": {"Azure Cosmos DB No SQL": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/azure_cosmos_db_no_sql/"}, "DuckDB": {"DuckDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/duckdb/"}, "StarRocksSettings": {"StarRocks": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/starrocks/"}, "PathwayVectorClient": {"Pathway": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pathway/"}, "DocArrayHnswSearch": {"DocArray HnswSearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/docarray_hnsw/"}, "TileDB": {"TileDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/tiledb/"}, "EcloudESVectorStore": {"China Mobile ECloud ElasticSearch VectorSearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/ecloud_vector_search/"}, "SurrealDBStore": {"SurrealDB": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/surrealdb/"}, "ManticoreSearch": {"ManticoreSearch VectorStore": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/manticore_search/"}, "ManticoreSearchSettings": {"ManticoreSearch VectorStore": 
"https://python.langchain.com/v0.2/docs/integrations/vectorstores/manticore_search/"}, "HuggingFaceEmbeddings": {"Aerospike": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/aerospike/", "self-query-qdrant": "https://python.langchain.com/v0.2/docs/templates/self-query-qdrant/"}, "Aerospike": {"Aerospike": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/aerospike/"}, "ElasticVectorSearch": {"Elasticsearch": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/elasticsearch/"}, "PGVecto_rs": {"PGVecto.rs": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pgvecto_rs/"}, "ZepVectorStore": {"Zep": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/zep/"}, "CollectionConfig": {"Zep": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/zep/"}, "openai": {"OpenAI Adapter(Old)": "https://python.langchain.com/v0.2/docs/integrations/adapters/openai-old/", "OpenAI Adapter": "https://python.langchain.com/v0.2/docs/integrations/adapters/openai/"}, "RankLLMRerank": {"RankLLM Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/rankllm-reranker/"}, "AsyncChromiumLoader": {"Beautiful Soup": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/beautiful_soup/", "Async Chromium": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/async_chromium/"}, "BeautifulSoupTransformer": {"Beautiful Soup": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/beautiful_soup/"}, "VolcengineRerank": {"Volcengine Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/volcengine_rerank/"}, "OpenVINOReranker": {"OpenVINO Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/openvino_rerank/"}, "create_metadata_tagger": {"OpenAI metadata tagger": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/openai_metadata_tagger/"}, "DoctranPropertyExtractor": {"Doctran: extract properties": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/doctran_extract_properties/"}, "DoctranQATransformer": {"Doctran: interrogate documents": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/doctran_interrogate_document/"}, "CrossEncoderReranker": {"Cross Encoder Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/cross_encoder_reranker/"}, "HuggingFaceCrossEncoder": {"Cross Encoder Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/cross_encoder_reranker/"}, "JinaRerank": {"Jina Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/jina_rerank/"}, "DoctranTextTranslator": {"Doctran: language translation": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/doctran_translate_document/"}, "MarkdownifyTransformer": {"Markdownify": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/markdownify/"}, "DashScopeRerank": {"DashScope Reranker": "https://python.langchain.com/v0.2/docs/integrations/document_transformers/dashscope_rerank/"}, "XorbitsLoader": {"Xorbits Pandas DataFrame": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/xorbits/"}, "OutlookMessageLoader": {"Email": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/email/"}, "TranscriptFormat": {"AssemblyAI Audio Transcripts": 
"https://python.langchain.com/v0.2/docs/integrations/document_loaders/assemblyai/", "YouTube transcripts": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/youtube_transcript/"}, "ScrapingAntLoader": {"ScrapingAnt": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/scrapingant/"}, "AirbyteSalesforceLoader": {"Airbyte Salesforce (Deprecated)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_salesforce/"}, "AirbyteCDKLoader": {"Airbyte CDK (Deprecated)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_cdk/"}, "Docx2txtLoader": {"Microsoft Word": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/microsoft_word/"}, "RSpaceLoader": {"# replace these ids with some from your own research notes.": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/rspace/"}, "SeleniumURLLoader": {"URL": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/url/"}, "PlaywrightURLLoader": {"URL": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/url/"}, "AirbyteJSONLoader": {"Airbyte JSON (Deprecated)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_json/"}, "AirbyteStripeLoader": {"Airbyte Stripe (Deprecated)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_stripe/"}, "GeoDataFrameLoader": {"Geopandas": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/geopandas/"}, "VectorstoreIndexCreator": {"HuggingFace dataset": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/hugging_face_dataset/", "Spreedly": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/spreedly/", "Figma": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/figma/", "Apify Dataset": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/apify_dataset/", "Iugu": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/iugu/", "Stripe": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/stripe/", "Modern Treasury": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/modern_treasury/"}, "AirbyteTypeformLoader": {"Airbyte Typeform (Deprecated)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_typeform/"}, "MHTMLLoader": {"mhtml": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/mhtml/"}, "SpiderLoader": {"Spider": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/spider/"}, "NewsURLLoader": {"News URL": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/news/"}, "ImageCaptionLoader": {"Image captions": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/image_captions/"}, "LLMSherpaFileLoader": {"LLM Sherpa": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/llmsherpa/"}, "PyMuPDFLoader": {"PyMuPDF": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/pymupdf/"}, "ScrapflyLoader": {"# ScrapFly": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/scrapfly/"}, "TomlLoader": {"TOML": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/toml/"}, "PsychicLoader": {"Psychic": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/psychic/"}, "FireCrawlLoader": {"FireCrawl": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/firecrawl/"}, "LarkSuiteWikiLoader": {"LarkSuite 
(FeiShu)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/larksuite/"}, "FakeListLLM": {"LarkSuite (FeiShu)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/larksuite/"}, "MergedDataLoader": {"Merge Documents Loader": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/merge_doc/"}, "RecursiveUrlLoader": {"Recursive URL": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/recursive_url/"}, "PDFPlumberLoader": {"PDFPlumber": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/pdfplumber/"}, "PyPDFium2Loader": {"PyPDFium2Loader": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/pypdfium2/"}, "AirbyteHubspotLoader": {"Airbyte Hubspot (Deprecated)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_hubspot/"}, "AirbyteGongLoader": {"Airbyte Gong (Deprecated)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_gong/"}, "AstraDBLoader": {"AstraDB": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/astradb/"}, "ReadTheDocsLoader": {"ReadTheDocs Documentation": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/readthedocs_documentation/"}, "MathpixPDFLoader": {"MathPixPDFLoader": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/mathpix/"}, "PolarsDataFrameLoader": {"Polars DataFrame": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/polars_dataframe/"}, "DataFrameLoader": {"Pandas DataFrame": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/pandas_dataframe/"}, "SurrealDBLoader": {"SurrealDB": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/surrealdb/"}, "DedocFileLoader": {"Dedoc": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/dedoc/"}, "DedocPDFLoader": {"Dedoc": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/dedoc/"}, "DedocAPIFileLoader": {"Dedoc": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/dedoc/"}, "GoogleApiClient": {"YouTube transcripts": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/youtube_transcript/"}, "ConcurrentLoader": {"Concurrent Loader": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/concurrent/"}, "RSSFeedLoader": {"RSS Feeds": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/rss/"}, "PebbloSafeLoader": {"Pebblo Safe DocumentLoader": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/pebblo/"}, "VsdxLoader": {"Vsdx": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/vsdx/"}, "NotebookLoader": {"Jupyter Notebook": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/jupyter_notebook/"}, "OracleAutonomousDatabaseLoader": {"Oracle Autonomous Database": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/oracleadb_loader/"}, "LanguageParser": {"Source Code": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/source_code/"}, "SRTLoader": {"Subtitle": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/subtitle/"}, "MastodonTootsLoader": {"Mastodon": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/mastodon/"}, "AirbyteShopifyLoader": {"Airbyte Shopify (Deprecated)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_shopify/"}, "PyPDFDirectoryLoader": {"PyPDFDirectoryLoader": 
"https://python.langchain.com/v0.2/docs/integrations/document_loaders/pypdfdirectory/"}, "PySparkDataFrameLoader": {"PySpark": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/pyspark_dataframe/"}, "AirbyteZendeskSupportLoader": {"Airbyte Zendesk Support (Deprecated)": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/airbyte_zendesk_support/"}, "CoNLLULoader": {"CoNLL-U": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/conll-u/"}, "MongodbLoader": {"MongoDB": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/mongodb/"}, "SitemapLoader": {"Sitemap": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/sitemap/"}, "YuqueLoader": {"Yuque": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/yuque/"}, "PDFMinerLoader": {"PDFMiner": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/pdfminer/"}, "PDFMinerPDFasHTMLLoader": {"PDFMiner": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/pdfminer/"}, "QuipLoader": {"Quip": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/quip/"}, "LangSmithLoader": {"LangSmithLoader": "https://python.langchain.com/v0.2/docs/integrations/document_loaders/langsmith/"}, "MemgraphGraph": {"Memgraph": "https://python.langchain.com/v0.2/docs/integrations/graphs/memgraph/"}, "GraphSparqlQAChain": {"RDFLib": "https://python.langchain.com/v0.2/docs/integrations/graphs/rdflib_sparql/"}, "RdfGraph": {"RDFLib": "https://python.langchain.com/v0.2/docs/integrations/graphs/rdflib_sparql/"}, "NebulaGraphQAChain": {"NebulaGraph": "https://python.langchain.com/v0.2/docs/integrations/graphs/nebula_graph/"}, "NebulaGraph": {"NebulaGraph": "https://python.langchain.com/v0.2/docs/integrations/graphs/nebula_graph/"}, "GremlinQAChain": {"Azure Cosmos DB for Apache Gremlin": "https://python.langchain.com/v0.2/docs/integrations/graphs/azure_cosmosdb_gremlin/"}, "GraphIndexCreator": {"NetworkX": "https://python.langchain.com/v0.2/docs/integrations/graphs/networkx/"}, "GraphQAChain": {"NetworkX": "https://python.langchain.com/v0.2/docs/integrations/graphs/networkx/"}, "NetworkxEntityGraph": {"NetworkX": "https://python.langchain.com/v0.2/docs/integrations/graphs/networkx/"}, "HugeGraphQAChain": {"HugeGraph": "https://python.langchain.com/v0.2/docs/integrations/graphs/hugegraph/"}, "HugeGraph": {"HugeGraph": "https://python.langchain.com/v0.2/docs/integrations/graphs/hugegraph/"}, "AGEGraph": {"Apache AGE": "https://python.langchain.com/v0.2/docs/integrations/graphs/apache_age/"}, "KuzuQAChain": {"Kuzu": "https://python.langchain.com/v0.2/docs/integrations/graphs/kuzu_db/"}, "KuzuGraph": {"Kuzu": "https://python.langchain.com/v0.2/docs/integrations/graphs/kuzu_db/"}, "FalkorDBQAChain": {"FalkorDB": "https://python.langchain.com/v0.2/docs/integrations/graphs/falkordb/"}, "FalkorDBGraph": {"FalkorDB": "https://python.langchain.com/v0.2/docs/integrations/graphs/falkordb/"}, "ConversationBufferWindowMemory": {"Baseten": "https://python.langchain.com/v0.2/docs/integrations/llms/baseten/", "OpaquePrompts": "https://python.langchain.com/v0.2/docs/integrations/llms/opaqueprompts/"}, "Solar": {"Solar": "https://python.langchain.com/v0.2/docs/integrations/llms/solar/"}, "GoogleSearchAPIWrapper": {"Bittensor": "https://python.langchain.com/v0.2/docs/integrations/llms/bittensor/"}, "IpexLLM": {"IPEX-LLM": "https://python.langchain.com/v0.2/docs/integrations/llms/ipex_llm/"}, "LLMContentHandler": {"SageMakerEndpoint": 
"https://python.langchain.com/v0.2/docs/integrations/llms/sagemaker/"}, "TextGen": {"TextGen": "https://python.langchain.com/v0.2/docs/integrations/llms/textgen/"}, "MosaicML": {"MosaicML": "https://python.langchain.com/v0.2/docs/integrations/llms/mosaicml/"}, "VolcEngineMaasLLM": {"Volc Engine Maas": "https://python.langchain.com/v0.2/docs/integrations/llms/volcengine_maas/"}, "KoboldApiLLM": {"KoboldAI API": "https://python.langchain.com/v0.2/docs/integrations/llms/koboldai/"}, "Konko": {"Konko": "https://python.langchain.com/v0.2/docs/integrations/llms/konko/"}, "OpaquePrompts": {"OpaquePrompts": "https://python.langchain.com/v0.2/docs/integrations/llms/opaqueprompts/"}, "TitanTakeoff": {"Titan Takeoff": "https://python.langchain.com/v0.2/docs/integrations/llms/titan_takeoff/"}, "Friendli": {"Friendli": "https://python.langchain.com/v0.2/docs/integrations/llms/friendli/"}, "Databricks": {"Databricks": "https://python.langchain.com/v0.2/docs/integrations/llms/databricks/"}, "LMFormatEnforcer": {"LM Format Enforcer": "https://python.langchain.com/v0.2/docs/integrations/llms/lmformatenforcer_experimental/"}, "VLLM": {"vLLM": "https://python.langchain.com/v0.2/docs/integrations/llms/vllm/"}, "VLLMOpenAI": {"vLLM": "https://python.langchain.com/v0.2/docs/integrations/llms/vllm/"}, "CustomOpenAIContentFormatter": {"Azure ML": "https://python.langchain.com/v0.2/docs/integrations/llms/azure_ml/"}, "ContentFormatterBase": {"Azure ML": "https://python.langchain.com/v0.2/docs/integrations/llms/azure_ml/"}, "DollyContentFormatter": {"Azure ML": "https://python.langchain.com/v0.2/docs/integrations/llms/azure_ml/"}, "load_llm": {"Azure ML": "https://python.langchain.com/v0.2/docs/integrations/llms/azure_ml/"}, "MapReduceChain": {"Manifest": "https://python.langchain.com/v0.2/docs/integrations/llms/manifest/"}, "ModelLaboratory": {"Manifest": "https://python.langchain.com/v0.2/docs/integrations/llms/manifest/"}, "ExLlamaV2": {"ExLlamaV2": "https://python.langchain.com/v0.2/docs/integrations/llms/exllamav2/"}, "RELLM": {"RELLM": "https://python.langchain.com/v0.2/docs/integrations/llms/rellm_experimental/"}, "Moonshot": {"MoonshotChat": "https://python.langchain.com/v0.2/docs/integrations/llms/moonshot/"}, "OpenLM": {"OpenLM": "https://python.langchain.com/v0.2/docs/integrations/llms/openlm/"}, "CloudflareWorkersAI": {"Cloudflare Workers AI": "https://python.langchain.com/v0.2/docs/integrations/llms/cloudflare_workersai/"}, "ChatGLM3": {"ChatGLM": "https://python.langchain.com/v0.2/docs/integrations/llms/chatglm/"}, "ChatGLM": {"ChatGLM": "https://python.langchain.com/v0.2/docs/integrations/llms/chatglm/"}, "Sambaverse": {"SambaNova": "https://python.langchain.com/v0.2/docs/integrations/llms/sambanova/"}, "SambaStudio": {"SambaNova": "https://python.langchain.com/v0.2/docs/integrations/llms/sambanova/"}, "LayerupSecurity": {"Layerup Security": "https://python.langchain.com/v0.2/docs/integrations/llms/layerup_security/"}, "JsonFormer": {"JSONFormer": "https://python.langchain.com/v0.2/docs/integrations/llms/jsonformer_experimental/"}, "WeightOnlyQuantPipeline": {"Intel Weight-Only Quantization": "https://python.langchain.com/v0.2/docs/integrations/llms/weight_only_quantization/"}, "Replicate": {"Replicate": "https://python.langchain.com/v0.2/docs/integrations/llms/replicate/"}, "tracing_v2_enabled": {"Chat Bot Feedback Template": "https://python.langchain.com/v0.2/docs/templates/chat-bot-feedback/"}, "QuerySQLDataBaseTool": {"Build a Question/Answering system over SQL data": 
"https://python.langchain.com/v0.2/docs/tutorials/sql_qa/"}, "OPENAI_TEMPLATE": {"Generate Synthetic Data": "https://python.langchain.com/v0.2/docs/tutorials/data_generation/"}, "create_openai_data_generator": {"Generate Synthetic Data": "https://python.langchain.com/v0.2/docs/tutorials/data_generation/"}, "DatasetGenerator": {"Generate Synthetic Data": "https://python.langchain.com/v0.2/docs/tutorials/data_generation/"}, "create_data_generation_chain": {"Generate Synthetic Data": "https://python.langchain.com/v0.2/docs/tutorials/data_generation/"}, "create_extraction_chain_pydantic": {"Generate Synthetic Data": "https://python.langchain.com/v0.2/docs/tutorials/data_generation/"}} \ No newline at end of file diff --git a/libs/partners/openai/langchain_openai/chat_models/base.py b/libs/partners/openai/langchain_openai/chat_models/base.py index d35ff75ecb65e..ef7a2559b6b0d 100644 --- a/libs/partners/openai/langchain_openai/chat_models/base.py +++ b/libs/partners/openai/langchain_openai/chat_models/base.py @@ -1120,7 +1120,6 @@ def with_structured_output( Args: schema: The output schema. Can be passed in as: - - an OpenAI function/tool schema, - a JSON Schema, - a TypedDict class (support added in 0.1.20), @@ -1138,7 +1137,6 @@ def with_structured_output( method: The method for steering model generation, one of: - - "function_calling": Uses OpenAI's tool-calling (formerly called function calling) API: https://platform.openai.com/docs/guides/function-calling @@ -1156,8 +1154,8 @@ def with_structured_output( Learn more about the differences between the methods and which models support which methods here: - - https://platform.openai.com/docs/guides/structured-outputs/structured-outputs-vs-json-mode - - https://platform.openai.com/docs/guides/structured-outputs/function-calling-vs-response-format + - https://platform.openai.com/docs/guides/structured-outputs/structured-outputs-vs-json-mode + - https://platform.openai.com/docs/guides/structured-outputs/function-calling-vs-response-format .. versionchanged:: 0.1.21 @@ -1200,26 +1198,22 @@ def with_structured_output( Returns: A Runnable that takes same inputs as a :class:`langchain_core.language_models.chat.BaseChatModel`. - If ``include_raw`` is False and ``schema`` is a Pydantic class, Runnable outputs - an instance of ``schema`` (i.e., a Pydantic object). + | If ``include_raw`` is False and ``schema`` is a Pydantic class, Runnable outputs an instance of ``schema`` (i.e., a Pydantic object). Otherwise, if ``include_raw`` is False then Runnable outputs a dict. - Otherwise, if ``include_raw`` is False then Runnable outputs a dict. + | If ``include_raw`` is True, then Runnable outputs a dict with keys: - If ``include_raw`` is True, then Runnable outputs a dict with keys: + - "raw": BaseMessage + - "parsed": None if there was a parsing error, otherwise the type depends on the ``schema`` as described above. + - "parsing_error": Optional[BaseException] - - "raw": BaseMessage - - "parsed": None if there was a parsing error, otherwise the type depends on the ``schema`` as described above. - - "parsing_error": Optional[BaseException] + .. dropdown:: Example: schema=Pydantic class, method="function_calling", include_raw=False, strict=True - Example: schema=Pydantic class, method="function_calling", include_raw=False, strict=True: - .. note:: Valid schemas when using ``strict`` = True + Note, OpenAI has a number of restrictions on what types of schemas can be + provided if ``strict`` = True. 
When using Pydantic, our model cannot + specify any Field metadata (like min/max constraints) and fields cannot + have default values. - OpenAI has a number of restrictions on what types of schemas can be - provided if ``strict`` = True. When using Pydantic, our model cannot - specify any Field metadata (like min/max constraints) and fields cannot - have default values. - - See all constraints here: https://platform.openai.com/docs/guides/structured-outputs/supported-schemas + See all constraints here: https://platform.openai.com/docs/guides/structured-outputs/supported-schemas .. code-block:: python @@ -1252,7 +1246,8 @@ class AnswerWithJustification(BaseModel): # justification='Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume or density of the objects may differ.' # ) - Example: schema=Pydantic class, method="function_calling", include_raw=True: + .. dropdown:: Example: schema=Pydantic class, method="function_calling", include_raw=True + .. code-block:: python from langchain_openai import ChatOpenAI @@ -1280,7 +1275,8 @@ class AnswerWithJustification(BaseModel): # 'parsing_error': None # } - Example: schema=TypedDict class, method="function_calling", include_raw=False: + .. dropdown:: Example: schema=TypedDict class, method="function_calling", include_raw=False + .. code-block:: python # IMPORTANT: If you are using Python <=3.8, you need to import Annotated @@ -1310,7 +1306,8 @@ class AnswerWithJustification(TypedDict): # 'justification': 'Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume and density of the two substances differ.' # } - Example: schema=OpenAI function schema, method="function_calling", include_raw=False: + .. dropdown:: Example: schema=OpenAI function schema, method="function_calling", include_raw=False + .. code-block:: python from langchain_openai import ChatOpenAI @@ -1339,7 +1336,8 @@ class AnswerWithJustification(TypedDict): # 'justification': 'Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume and density of the two substances differ.' # } - Example: schema=Pydantic class, method="json_mode", include_raw=True: + .. dropdown:: Example: schema=Pydantic class, method="json_mode", include_raw=True + .. code-block:: from langchain_openai import ChatOpenAI @@ -1367,7 +1365,8 @@ class AnswerWithJustification(BaseModel): # 'parsing_error': None # } - Example: schema=None, method="json_mode", include_raw=True: + .. dropdown:: Example: schema=None, method="json_mode", include_raw=True + .. 
code-block:: structured_llm = llm.with_structured_output(method="json_mode", include_raw=True) From 766b650fdca6ae0b55f884b4b5d89a9591cbd248 Mon Sep 17 00:00:00 2001 From: Erick Friis Date: Wed, 21 Aug 2024 13:00:47 -0700 Subject: [PATCH 48/80] chroma: add back fastapi optional dep (#25641) --- libs/partners/chroma/poetry.lock | 24 ++++++++++++------------ libs/partners/chroma/pyproject.toml | 4 ++++ 2 files changed, 16 insertions(+), 12 deletions(-) diff --git a/libs/partners/chroma/poetry.lock b/libs/partners/chroma/poetry.lock index ad0b0441bfa98..698a6a2c2cbff 100644 --- a/libs/partners/chroma/poetry.lock +++ b/libs/partners/chroma/poetry.lock @@ -892,7 +892,7 @@ adal = ["adal (>=1.0.2)"] [[package]] name = "langchain-core" -version = "0.2.33" +version = "0.2.34" description = "Building applications with LLMs through composability" optional = false python-versions = ">=3.8.1,<4.0" @@ -917,13 +917,13 @@ url = "../../core" [[package]] name = "langsmith" -version = "0.1.100" +version = "0.1.101" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langsmith-0.1.100-py3-none-any.whl", hash = "sha256:cae44a884a4166c4d8b9cc5ff99f5d520337bd90b9dadfe3706ed31415d559a7"}, - {file = "langsmith-0.1.100.tar.gz", hash = "sha256:20ff0126253a5a1d621635a3bc44ccacc036e855f52185ae983420f14eb6c605"}, + {file = "langsmith-0.1.101-py3-none-any.whl", hash = "sha256:572e2c90709cda1ad837ac86cedda7295f69933f2124c658a92a35fb890477cc"}, + {file = "langsmith-0.1.101.tar.gz", hash = "sha256:caf4d95f314bb6cd3c4e0632eed821fd5cd5d0f18cb824772fce6d7a9113895b"}, ] [package.dependencies] @@ -1556,13 +1556,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "posthog" -version = "3.5.0" +version = "3.5.2" description = "Integrate PostHog into any python application." 
optional = false python-versions = "*" files = [ - {file = "posthog-3.5.0-py2.py3-none-any.whl", hash = "sha256:3c672be7ba6f95d555ea207d4486c171d06657eb34b3ce25eb043bfe7b6b5b76"}, - {file = "posthog-3.5.0.tar.gz", hash = "sha256:8f7e3b2c6e8714d0c0c542a2109b83a7549f63b7113a133ab2763a89245ef2ef"}, + {file = "posthog-3.5.2-py2.py3-none-any.whl", hash = "sha256:605b3d92369971cc99290b1fcc8534cbddac3726ef7972caa993454a5ecfb644"}, + {file = "posthog-3.5.2.tar.gz", hash = "sha256:a383a80c1f47e0243f5ce359e81e06e2e7b37eb39d1d6f8d01c3e64ed29df2ee"}, ] [package.dependencies] @@ -2138,13 +2138,13 @@ dev = ["hypothesis (>=6.70.0)", "pytest (>=7.1.0)"] [[package]] name = "syrupy" -version = "4.6.1" +version = "4.6.4" description = "Pytest Snapshot Test Utility" optional = false -python-versions = ">=3.8.1,<4" +python-versions = ">=3.8.1" files = [ - {file = "syrupy-4.6.1-py3-none-any.whl", hash = "sha256:203e52f9cb9fa749cf683f29bd68f02c16c3bc7e7e5fe8f2fc59bdfe488ce133"}, - {file = "syrupy-4.6.1.tar.gz", hash = "sha256:37a835c9ce7857eeef86d62145885e10b3cb9615bc6abeb4ce404b3f18e1bb36"}, + {file = "syrupy-4.6.4-py3-none-any.whl", hash = "sha256:5a0e47b187d32b58555b0de6d25bc7bb875e7d60c7a41bd2721f5d44975dcf85"}, + {file = "syrupy-4.6.4.tar.gz", hash = "sha256:a6facc6a45f1cff598adacb030d9573ed62863521755abd5c5d6d665f848d6cc"}, ] [package.dependencies] @@ -2796,4 +2796,4 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<4" -content-hash = "e45811e74878a9b652fef6ee06b10ad2d9e2cc33071bc8413bf2450aa17e47b7" +content-hash = "b6bafda889d07ec7a6d23da03123de6bbd79405f359512df9133d12d5b72a93b" diff --git a/libs/partners/chroma/pyproject.toml b/libs/partners/chroma/pyproject.toml index 91ba52d02edc4..4777db44b7406 100644 --- a/libs/partners/chroma/pyproject.toml +++ b/libs/partners/chroma/pyproject.toml @@ -46,6 +46,10 @@ markers = [ [tool.poetry.dependencies.chromadb] version = ">=0.4.0,<0.6.0,!=0.5.4,!=0.5.5" +[tool.poetry.dependencies.fastapi] +version = ">=0.95.2,<1" +optional = true + [tool.poetry.group.test] optional = true From f535e8a99e0ad8fa342785b3b309046a66940333 Mon Sep 17 00:00:00 2001 From: Bagatur <22008038+baskaryan@users.noreply.github.com> Date: Wed, 21 Aug 2024 13:50:24 -0700 Subject: [PATCH 49/80] docs: ls similar examples header (#25642) --- docs/docs/how_to/example_selectors_langsmith.ipynb | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/docs/how_to/example_selectors_langsmith.ipynb b/docs/docs/how_to/example_selectors_langsmith.ipynb index 2e7ab9a5bcbc0..b8d928082ff13 100644 --- a/docs/docs/how_to/example_selectors_langsmith.ipynb +++ b/docs/docs/how_to/example_selectors_langsmith.ipynb @@ -116,6 +116,8 @@ "id": "5767d171", "metadata": {}, "source": [ + "## Querying dataset\n", + "\n", "Indexing can take a few seconds. Once the dataset is indexed, we can search for similar examples. Note that the input to the `similar_examples` method must have the same schema as the examples inputs. In this case our example inputs are a dictionary with a \"question\" key:" ] }, @@ -223,6 +225,8 @@ "id": "e852c8ef", "metadata": {}, "source": [ + "## Creating dynamic few-shot prompts\n", + "\n", "The search returns the examples whose inputs are most similar to the query input. 
We can use this for few-shot prompting a model like so:" ] }, From 4591bc0b01d9618b0d4af1f1d65cba2e8b411be8 Mon Sep 17 00:00:00 2001 From: jakerachleff Date: Wed, 21 Aug 2024 14:02:01 -0700 Subject: [PATCH 50/80] Use 1.101 instead of 1.100 bc 1.100 was yanked (#25638) --- docs/docs/how_to/example_selectors_langsmith.ipynb | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/docs/how_to/example_selectors_langsmith.ipynb b/docs/docs/how_to/example_selectors_langsmith.ipynb index b8d928082ff13..0db835427c0b8 100644 --- a/docs/docs/how_to/example_selectors_langsmith.ipynb +++ b/docs/docs/how_to/example_selectors_langsmith.ipynb @@ -18,7 +18,7 @@ "\n", "\n", "\n", "\n", @@ -73,7 +73,7 @@ "metadata": {}, "outputs": [], "source": [ - "%pip install -qU \"langsmith>=0.1.100\" \"langchain-core>=0.2.34\" langchain langchain-openai langchain-benchmarks" + "%pip install -qU \"langsmith>=0.1.101\" \"langchain-core>=0.2.34\" langchain langchain-openai langchain-benchmarks" ] }, { From 4f347cbcb98c94fa3936321c934c6fc42bd6920e Mon Sep 17 00:00:00 2001 From: Bagatur <22008038+baskaryan@users.noreply.github.com> Date: Wed, 21 Aug 2024 14:02:25 -0700 Subject: [PATCH 51/80] docs: link Versions in intro (#25640) --- docs/docs/introduction.mdx | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/docs/docs/introduction.mdx b/docs/docs/introduction.mdx index 1c4b651c1b667..2e5c3a099bff3 100644 --- a/docs/docs/introduction.mdx +++ b/docs/docs/introduction.mdx @@ -83,11 +83,13 @@ Trace and evaluate your language model applications and intelligent agents to he ### [🦜🕸️ LangGraph](https://langchain-ai.github.io/langgraph) Build stateful, multi-actor applications with LLMs. Integrates smoothly with LangChain, but can be used without it. - ## Additional resources +### [Versions](/docs/versions/overview/) +See what changed in v0.2, learn how to migrate legacy code, and read up on our release/versioning policies, and more. + ### [Security](/docs/security) -Read up on our [Security](/docs/security) best practices to make sure you're developing safely with LangChain. +Read up on [security](/docs/security) best practices to make sure you're developing safely with LangChain. ### [Integrations](/docs/integrations/providers/) LangChain is part of a rich ecosystem of tools that integrate with our framework and build on top of it. Check out our growing list of [integrations](/docs/integrations/providers/). From fb1d67edf6f667d7f32820b16d63de93512b2c6d Mon Sep 17 00:00:00 2001 From: Scott Hurrey Date: Wed, 21 Aug 2024 18:40:40 -0400 Subject: [PATCH 52/80] box: add retrievers and fix docs (#25633) Thank you for contributing to LangChain! **Description:** Adding `BoxRetriever` for langchain_box. This retriever handles two use cases: * Retrieve all documents that match a full-text search * Retrieve the answer to a Box AI prompt as a Document **Twitter handle:** @BoxPlatform - [x] **Add tests and docs**: If you're adding a new integration, please include 1. a test for the integration, preferably unit tests that do not rely on network access, 2. an example notebook showing its use. It lives in `docs/docs/integrations` directory. - [x] **Lint and test**: Run `make format`, `make lint` and `make test` from the root of the package(s) you've modified. See contribution guidelines for more: https://python.langchain.com/docs/contributing/ Additional guidelines: - Make sure optional dependencies are imported within a function. 
- Please do not add dependencies to pyproject.toml files (even optional ones) unless they are required for unit tests. - Most PRs should not touch more than one package. - Changes should be backwards compatible. - If you are adding something to community, do not re-import it in langchain. If no one reviews your PR within a few days, please @-mention one of baskaryan, efriis, eyurtsev, ccurme, vbarda, hwchase17. --------- Co-authored-by: Erick Friis --- docs/docs/integrations/providers/box.mdx | 11 + docs/docs/integrations/retrievers/box.ipynb | 323 ++++++++++++++++++ libs/partners/box/langchain_box/__init__.py | 6 +- .../box/langchain_box/document_loaders/box.py | 73 ++-- .../box/langchain_box/retrievers/__init__.py | 5 + .../box/langchain_box/retrievers/box.py | 158 +++++++++ .../box/langchain_box/utilities/__init__.py | 4 +- .../box/langchain_box/utilities/box.py | 268 ++++++++++----- .../document_loaders/test_box_file_loader.py | 45 +-- .../integration_tests/retrievers/__init__.py | 0 .../retrievers/test_box_retriever.py | 3 + .../utilities/test_box_util.py | 50 +-- .../document_loaders/test_box_loader.py | 41 +++ .../tests/unit_tests/retrievers/__init__.py | 0 .../retrievers/test_box_retriever.py | 89 +++++ .../box/tests/unit_tests/test_imports.py | 3 +- .../unit_tests/utilities/test_box_util.py | 137 +++++++- 17 files changed, 1001 insertions(+), 215 deletions(-) create mode 100644 docs/docs/integrations/retrievers/box.ipynb create mode 100644 libs/partners/box/langchain_box/retrievers/__init__.py create mode 100644 libs/partners/box/langchain_box/retrievers/box.py create mode 100644 libs/partners/box/tests/integration_tests/retrievers/__init__.py create mode 100644 libs/partners/box/tests/integration_tests/retrievers/test_box_retriever.py create mode 100644 libs/partners/box/tests/unit_tests/retrievers/__init__.py create mode 100644 libs/partners/box/tests/unit_tests/retrievers/test_box_retriever.py diff --git a/docs/docs/integrations/providers/box.mdx b/docs/docs/integrations/providers/box.mdx index 52b2b5d360ed8..0f955b460d7a1 100644 --- a/docs/docs/integrations/providers/box.mdx +++ b/docs/docs/integrations/providers/box.mdx @@ -172,3 +172,14 @@ If you wish to use OAuth2 with the authorization_code flow, please use `BoxAuthT from langchain_box.document_loaders import BoxLoader ``` + +## Retrievers + +### BoxRetriever + +[See usage example](/docs/integrations/retrievers/box) + +```python +from langchain_box.retrievers import BoxRetriever + +``` diff --git a/docs/docs/integrations/retrievers/box.ipynb b/docs/docs/integrations/retrievers/box.ipynb new file mode 100644 index 0000000000000..7e0577e998f7c --- /dev/null +++ b/docs/docs/integrations/retrievers/box.ipynb @@ -0,0 +1,323 @@ +{ + "cells": [ + { + "cell_type": "raw", + "id": "afaf8039", + "metadata": {}, + "source": [ + "---\n", + "sidebar_label: Box\n", + "---" + ] + }, + { + "cell_type": "markdown", + "id": "e49f1e0d", + "metadata": {}, + "source": [ + "# BoxRetriever\n", + "\n", + "This will help you getting started with the Box [retriever](/docs/concepts/#retrievers). For detailed documentation of all BoxRetriever features and configurations head to the [API reference](https://api.python.langchain.com/en/latest/retrievers/langchain_box.retrievers.Box.BoxRetriever.html).\n", + "\n", + "# Overview\n", + "\n", + "The `BoxRetriever` class helps you get your unstructured content from Box in Langchain's `Document` format. 
You can do this by searching for files based on a full-text search or using Box AI to retrieve a `Document` containing the result of an AI query against files. Using Box AI requires including a `List[str]` containing Box file ids, e.g. `[\"12345\",\"67890\"]` \n", + "\n", + ":::info\n", + "Box AI requires an Enterprise Plus license\n", + ":::\n", + "\n", + "Files without a text representation will be skipped.\n", + "\n", + "### Integration details\n", + "\n", + "1: Bring-your-own data (i.e., index and search a custom corpus of documents):\n", + "\n", + "| Retriever | Self-host | Cloud offering | Package |\n", + "| :--- | :--- | :---: | :---: |\n", + "[BoxRetriever](https://api.python.langchain.com/en/latest/retrievers/langchain-box.retrievers.langchain_box.BoxRetriever.html) | ❌ | ✅ | langchain-box |\n", + "\n", + "## Setup\n", + "\n", + "In order to use the Box package, you will need a few things:\n", + "\n", + "* A Box account — If you are not a current Box customer or want to test outside of your production Box instance, you can use a [free developer account](https://account.box.com/signup/n/developer#ty9l3).\n", + "* [A Box app](https://developer.box.com/guides/getting-started/first-application/) — This is configured in the [developer console](https://account.box.com/developers/console), and for Box AI, must have the `Manage AI` scope enabled. Here you will also select your authentication method.\n", + "* The app must be [enabled by the administrator](https://developer.box.com/guides/authorization/custom-app-approval/#manual-approval). For free developer accounts, this is whoever signed up for the account.\n", + "\n", + "### Credentials\n", + "\n", + "For these examples, we will use [token authentication](https://developer.box.com/guides/authentication/tokens/developer-tokens). This can be used with any [authentication method](https://developer.box.com/guides/authentication/). Just get the token using whichever method you prefer. If you want to learn more about how to use other authentication types with `langchain-box`, visit the [Box provider](/docs/integrations/providers/box) document."
+ ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "b87a8e8b-9b5a-4e78-97e4-274b6b0dd29f", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Enter your Box Developer Token: ········\n" + ] + } + ], + "source": [ + "import getpass\n", + "import os\n", + "\n", + "box_developer_token = getpass.getpass(\"Enter your Box Developer Token: \")" + ] + }, + { + "cell_type": "markdown", + "id": "72ee0c4b-9764-423a-9dbf-95129e185210", + "metadata": {}, + "source": [ + "If you want to get automated tracing from individual queries, you can also set your [LangSmith](https://docs.smith.langchain.com/) API key by uncommenting below:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a15d341e-3e26-4ca3-830b-5aab30ed66de", + "metadata": {}, + "outputs": [], + "source": [ + "# os.environ[\"LANGSMITH_API_KEY\"] = getpass.getpass(\"Enter your LangSmith API key: \")\n", + "# os.environ[\"LANGSMITH_TRACING\"] = \"true\"" + ] + }, + { + "cell_type": "markdown", + "id": "0730d6a1-c893-4840-9817-5e5251676d5d", + "metadata": {}, + "source": [ + "### Installation\n", + "\n", + "This retriever lives in the `langchain-box` package:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "652d6238-1f87-422a-b135-f5abbb8652fc", + "metadata": {}, + "outputs": [], + "source": [ + "%pip install -qU langchain-box" + ] + }, + { + "cell_type": "markdown", + "id": "a38cde65-254d-4219-a441-068766c0d4b5", + "metadata": {}, + "source": [ + "## Instantiation\n", + "\n", + "Now we can instantiate our retriever:\n", + "\n", + "## Search" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "70cc8e65-2a02-408a-bbc6-8ef649057d82", + "metadata": {}, + "outputs": [], + "source": [ + "from langchain_box import BoxRetriever\n", + "\n", + "retriever = BoxRetriever(box_developer_token=box_developer_token)" + ] + }, + { + "cell_type": "markdown", + "id": "41287857-cfe9-4d39-a84d-e7bd9f1f59a8", + "metadata": {}, + "source": [ + "## Box AI" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ee0e726d-9974-4aa0-9ce1-0057ec3e540a", + "metadata": {}, + "outputs": [], + "source": [ + "from langchain_box import BoxRetriever\n", + "\n", + "box_file_ids = [\"1514555423624\", \"1514553902288\"]\n", + "\n", + "retriever = BoxRetriever(\n", + " box_developer_token=box_developer_token, box_file_ids=box_file_ids\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "5c5f2839-4020-424e-9fc9-07777eede442", + "metadata": {}, + "source": [ + "## Usage" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "51a60dbe-9f2e-4e04-bb62-23968f17164a", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[Document(metadata={'source': 'https://dl.boxcloud.com/api/2.0/internal_files/1233039227512/versions/1346280085912/representations/extracted_text/content/', 'title': 'FIVE_FEET_AND_RISING_by_Peter_Sollett_pdf'}, page_content='\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 1/25\\n\\nFIVE FEET AND RISING\\n\\nby\\n\\nPeter Sollett\\n\\nFADE IN:\\n\\nEXT. 8TH STREET BETWEEN AVENUES C AND D - DAY \\n\\nA group of dark-skinned girls wearing cheerleading outfits \\nalign themselves in formation on the sidewalk. They begin to \\ndance. No music can be heard. The sound of the girls\\' bodies \\nis our soundtrack. 
We hear their strained breathing, palms \\nand sneaker bottoms pounding while they hum and count softly \\nto themselves in an effort to keep the rhythm.\\n\\nSLO-MO: We explore the bodies of the dancers; their bright \\neyes and sweaty brows, their stomping feet and colliding \\nhands (dark side and light side). The younger girls perform \\nprovocative dance movements, the older girls repeat them.\\n\\nTheir bodies silhouette in the bright sunlight.\\n\\nCUT TO: TITLES\\n\\nEXT. AMANDA\\'S BLOCK - DAY \\n\\nAMANDA, a tall 14-year-old exits the front door of her \\napartment budding with her headphones in one hand and a \\nmagazine in the other. She sits down on her stoop, puts her \\nheadphones on and presses \"play\". We can hear the sound of \\nSalsa leaking out of the sides of her headphones. JENETTE, \\nten years old with big black hair in rubber-band restraints, sits on \\nthe sidewalk below Amanda drawing with a piece of chalk. Jenette \\n\\n looks over her shoulder and sees Amanda reading her magazine Jenette \\nclimbs the stairs and sits down beside her.\\n\\nThe camera pans to reveal AARON, an 18-year-old boy on the \\nother side of the street, unloading some fireworks from the \\ntrunk of a car. He\\'s filling a paper bag with them, \\ncarefully making sure not to reveal what he\\'s doing to \\nonlookers.\\n\\nAt the ear end of the block, DONNA, 14, and MICHELLE, 12, \\nsit and watch Aaron at work.\\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 2/25\\n\\nMICHELLE \\nHow does he look up close?\\n\\nDONNA \\n(Amorously) \\n\\nUmm, he got dark brownish \\neyes, he got a nice nose I \\nlove his nose. I love his \\nskin. I love his lips, he \\ngot a great smile and he \\ngot-\\n\\nMICHELLE \\nA bad attitude.\\n\\nDONNA \\nYeah, he got a bad \\nattitude.\\n\\nMICHELLE \\nYou said before, that he \\ngot boxes?\\n\\nSLO-MO: The camera is very close to Aaron. Details of his \\nbody in a shallow depth of field.\\n\\nDONNA \\nYeah, he got boxes in his \\nstomach. He\\'s taller than \\nme.\\n\\nMICHELLE \\nHow old is he?\\n\\nDONNA \\nI think he\\'s 18 or 17.\\n\\nMICHELLE \\nYou gonna talk to him?\\n\\nDONNA \\nUm, yeah I think so.\\n\\nBack on Amanda\\'s stoop.\\n\\nJENETTE \\nYou still like him.\\n\\nAMANDA \\n(With a sigh of negative attitude) \\n\\nNo.\\n\\nHECTOR, a mature-looking 13-year old is crossing the \\nstreet. He enters frame with Amanda and Jenette. \\n\\nHECTOR \\nYo, wuzzup. \\n\\nAmanda ignores him. \\n\\nJENETTE \\nHi Hector\\n\\nHECTOR \\n(To Amanda) \\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 3/25\\n\\nOh, you\\'re not gonna say \\nhello.\\n\\nAMANDA\\'S POV: Donna approaches Aaron as he locks up the \\ntrunk of the car. She hesitantly calls over to him. He \\nacknowledges her with a lift of the chin. Making sure he \\nkeeps his distance from her, he looks around to see if \\nanyone is watching him. He tosses his head for her to \\nfollow. He begins to walk away down the block. She follows.\\n\\nHector is looking at Amanda. He appears to have run out of \\nthings to say. Amanda removes her headphones. 
Her music \\nbecomes clearer, more audible.\\n\\nHECTOR \\nYo, you gonna keep me \\nhangin\\' like dat?\\n\\nAMANDA \\nHector, Yo try to rap to me every day, why don\\'t you \\ntake your three-quarters retarded ass outta here?\\n\\nHECTOR \\nYo, you betta give me my \\nrespects or I\\'ll tell your \\nlittle girl ova here what \\nI heard about you and my \\nboy.\\n\\nAmanda puts her headphones back on.\\n\\nHECTOR looks like he got a new girl anyway.\\n\\nEXT. 8TH STREET BETWEEN AVENUES C AND D - AFTERNOON \\n\\nVICTOR, a skinny 12-year-old with sloppy hair, is asleep in \\nthe sun on his fire escape. There is sweat beaded up on his \\nbody. His shirt is rolled up behind his head like a pillow. \\nHis breath is heavy, his chest rises and falls. The camera \\ntilts to reveal CARLOS, ten, rounding the corner on the \\nstreet below. The camera tracks backwards as Carlos \\napproaches. He is talking to himself.\\n\\nCARLOS \\n(To himself) \\n\\nWhatcha gonna do when ya \\nbitch is untrue?\\n\\nCarlos lifts his head up to look at the fire escapes.\\n\\nCARLOS \\nYo Victor!\\n\\nThe camera pans and tilts up to the fire escapes. The \\nbuildings float by. He arrives in front of Victor\\'s \\nbuilding and cups his hand around his mouth.\\n\\nCARLOS \\nYo Victor!\\n\\nOn the fire escape, Victor\\'s eyes pop open and the sun \\nshines into them.\\n\\nVICTOR \\n(Dazed) \\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 4/25\\n\\nWhat, wuzzup?\\n\\nVictor sits up and looks over the side of the fire escape.\\n\\nCARL0S \\nCome down!\\n\\nVICTOR \\nI can\\'t!\\n\\nCARLOS \\nWhy?\\n\\nVICTOR \\nI got punished, man.\\n\\nCARLOS \\nFa what?\\n\\nVICTOR \\nI won\\'t let my motha cut \\nmy hair.\\n\\nCARLOS \\nWha\\'?\\n\\nVICTOR \\nShe fucks it all up!\\n\\nCARLOS \\nForget it! C\\'mon Let\\'s go \\nto the pool.\\n\\nVICTOR \\nI can\\'t man, I\\'m punished!\\n\\nCARL0S \\nWho gives!\\n\\nVICTOR \\nI can\\'t, I\\'m gonna get \\npunished more!\\n\\nCARLOS \\nTrust me, I always get \\ninto trouble, c\\'mon!\\n\\nVictor sits down on the fire escape. Carlos pauses for a \\nminute and turns his back on Victor.\\n\\nCARL0S \\nC\\'mon! The girls are \\nwaiting for you!\\n\\nVictor hops back up.\\n\\nVICTOR \\n(interested) \\n\\nThey are?\\n\\nCARLOS \\nYeah! Tell me which one you would like. To be doin\\' \\nnothin on a fire escape or beat the pool with a bunch of \\ngirls? Be straight up!\\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 5/25\\n\\nVICTOR \\nI\\'ll be right down.\\n\\nVictor climbs down the fire escape and hops down to the \\nstreet. he immediately grabs Carlos and starts pushing him \\ndown the block to avoid being seen from above.\\n\\nEXT. THE CORNER OF 8TH STREET AND AVENUE D - CONTINUOUS \\n\\nThe boys safely round the corner onto Avenue D. Victor \\nperks up and starts nudging Carlos.\\n\\nVICTOR \\nSo what girls are over \\nthere?\\n\\nCARLOS \\nNatasha, Maria, Tina-\\n\\nVICTOR \\nThese are the pretty girls \\nyou told me to come down \\nfor?\\n\\nVictor sighs and runs his fingers through his hair.\\n\\nCARLOS \\nWhat\\'s the difference, you \\nnever do anything anyway\\n\\nVictor makes a disagreeing gesture. 
Carlos drags Victor \\nDowntown.\\n\\nVICTOR \\nWhat are you going that \\nway for?\\n\\nCARLOS \\nI\\'m not goin\\' to 10th \\nStreet, people piss and \\nshit in that pool,\\n\\nVICTOR \\nWhere you goin\\'?\\n\\nCARLOS \\nPitt.\\n\\nVICTOR \\nOh man, what we gotta \\nleave ar\\' own neighborhood \\nfor?\\n\\nCARLOS \\nC\\'mon.\\n\\nVICTOR \\nMan, if I go down you\\'re \\ngoin\\' down with me.\\n\\nEXT. AVENUE D - CONTINUOUS \\n\\nMONTAGE: Victor and Carlos hang on each other as they walk down \\n Avenue D towards the Pitt Street Pool. They pass by Victor brother, \\n giving him an impromptu smack and then bang on a store window to wave \\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 6/25\\n\\nhello to a friend.\\n\\nFrom Victor\\'s POV we work our way through a crowd of people \\nan cross 3rd Street. Victor looks back at the crowd with a \\nwatchful eye. The camera tracks along in the street as the \\nboys walk along the sidewalk. Victor looks up at a street \\nsign. It reads, \"Houston St.\" As the boys make their way \\nacross the wide intersection, the heat is slightly visible \\nas car exhaust fills the gridlocked lanes. Victor and \\nCarlos walk calmly, with space between them, making their \\nway towards the camera in a shallow depth of field as we \\nfollow focus on them.\\n\\nEXT. PITT STREET POOL - CONTINUOUS \\n\\nVictor and Carlos stand on line outside the pool gates. Police \\nexamine the boys as they slowly inch their way into the park.\\n\\nFrom Victor\\'s POV we see the expanse of the pool as he \\nenters the park. We watch as he surveys the area.\\n\\nFrom a high angle we see Carlos nudge Victor to make his \\nway onto the pool deck. They enter the crowd, proceeding \\ncarefully, making sure not to bump anyone.\\n\\nAs they continue to walk, Victor\\'s POV reveals the bodies \\nof older boys and girls, rough water and mischievous kids.\\n\\nEXT. PITT STREET POOL - CONTINUOUS \\n\\nAmanda is sitting poolside with Jenette.\\n\\nAMANDA \\nAnd that girl, over there? \\nHoochie.\\n\\nJenette looks out across the pool trying to see who Amanda \\nis talking about.\\n\\nAMANDA \\nAnd him. Stay away from \\nhim, he\\'s only interested \\nin that.\\n\\nAmanda points between Jenette\\'s legs.\\n\\nAMANDA \\nThat right there.\\n\\nCarlos steps in front of them, Amanda smacks his leg.\\n\\nCARLOS \\nYo Amanda, wassup?\\n\\nThey kiss on the cheek.\\n\\nCARLOS \\n(To Victor) \\n\\nAmanda is Eddie\\'s cousin.\\n\\nVICTOR \\nEddie from Compost?\\n\\nCARLOS \\nNo, Baruch.\\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 7/25\\n\\nA girl swimming in the pool calls over to Carlos.\\n\\nGIRL 1 \\nCarlos! Carlos, get your \\nskinny ass over here!\\n\\nCARLOS \\n(To Victor) \\n\\nStay right here, I\\'ll be \\nright back.\\n\\nCarlos walks off leaving Victor standing next to Amanda and \\nJenette. Victor looks uncomfortable.\\n\\nAMANDA \\nWho are you?\\n\\nVICTOR \\nI\\'m wit\\' Carlos.\\n\\nAmanda points out across the pool.\\n\\nAMANDA \\n(To Jenette) \\n\\nHim right there, That\\'s \\nwho I\\'m talkin\\' about. 
\\n\\n(to Victor) \\nExcuse me, can you move, I \\ncan\\'t see.\\n\\nAmanda spots Aaron and Donna in the distance.\\n\\nAMANDA \\nDo you have a name?\\n\\nVICTOR \\nVictor.\\n\\nAMANDA \\nWhat?\\n\\nVICTOR \\nVictor.\\n\\nAmanda turns to Jenette and continues talking to her.\\n\\nVICTOR \\nUmm, I\\'m gonna go find \\nCarlos.\\n\\nAs Victor turns to walk, the camera pans to follow him, \\nrevealing Hector who is approaching Amanda. The camera then \\npans back to Amanda. She sighs and turns her ahead away \\nfrom him.\\n\\nEXT. PITT STREET POOL - CONTINUOUS \\n\\nIn the playground area at the Pitt Street Pool, Aaron is doing a\\' \\nimpression of Al Pacino. Darrell and Boy 1 look on.\\n\\nAARON \\n(to Boy 1) \\n\\nYou wanna meet my little \\nfriend? \\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 8/25\\n\\nBoy 1 is laughing at Aaron. Donna and Michelle stand nearby \\nwatching.\\n\\nAARON \\nDon\\'t fuck wit\\' me! Don\\'t \\nfuck wit\\' me.\\n\\n(pointing his finger)\\nMy lawyer\\'s so good he\\'ll \\nhave you workin in Alaska, \\nso dress warn.\\n\\nDONNA \\nAaron, how you doin\\'?\\n\\nAARON \\nFine.\\n\\nDONNA \\nLook at me.\\n\\nAARON \\nWhat?\\n\\nDONNA \\nWhy you have an attitude \\nfor?\\n\\nAARON \\nNot now, I\\'m busy\\n\\nDONNA \\nGod, I just wanna speak to \\nyou. I just wanna speak \\nto you the way I feel \\nabout you.\\n\\nAARON \\nHurry up, you\\'re wastin\\' \\nmy time, what the fuck.\\n\\nAaron turns back to his friends.\\n\\nDONNA \\nPlease don\\'t scream at me. \\nI like you, but I don\\'t \\nlike the way your attitude \\nis.\\n\\nAARON \\nSo get the fuck outta \\nhere.\\n\\nDarrell and Boy 1 approve. They wait for Donna\\'s reply.\\n\\nDONNA \\nI wanna go out with you, I \\nwant to be part of your \\nlife. I want you to treat \\nme the way a girlfriend \\nshould be treated.\\n\\nAARON \\nThen don\\'t go out with me.\\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 9/25\\n\\nDONNA \\nFor once in your life have \\nsome respect for me, \\ndon\\'t even curse at me or \\nnothin\\'.\\n\\nAARON \\n(to Darrell) \\n\\nNow she\\'s tellin\\' me what \\nthe fuck to do.\\n\\nDONNA \\nGod, you drive me crazy. I \\njust want you to know how \\nI feel and you don\\'t \\nunderstand.\\n\\nAARON \\nJust get the fuck outta \\nhere.\\n\\nDonna stares at Aaron as he turns back to his friends. \\nMichelle walks up to Donna and gently leads her away.\\n\\nAARON \\nThat girl be trippin\\'. \\n\\n(Back into his Pacino impression) \\nOne time I let her kiss my \\nrings and forever \\nshe tries to repay me!\\n\\nEXT. PITT STREET POOL - LATER \\n\\nVictor and Carlos are playing, trying to force each others\\' heads \\nunderwater. \\nCarlos squirts water through his lips.\\n\\nVICTOR \\nI gotta go take a piss.\\n\\nCARLOS \\nIf we were at 10th Street \\nPool you woulda done it \\nright in the water, right?\\n\\nThe camera pans as Victor climbs out of the pool and onto a \\nlong line. As he stands and waits, Amanda can be seen in \\nthe background arguing with Hector.\\n\\nIn the water, Carlos makes a face at Victor. Victor makes \\none back.\\n\\nVICTOR \\n(Under his breath to Carlos) \\n\\nI\\'m gonna beat you.\\n\\nEXT. PITT STREET POOL - CONTINUOUS \\n\\nHector and Amanda have been arguing. 
Jenette is sitting on the ground \\nbeneath \\nthem.\\n\\nHECTOR \\nI know you likes me.\\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 10/25\\n\\nAMANDA \\nI eave me alone!\\n\\nHECTOR \\nWhy don\\'t you share the \\nwealth a little bit?\\n\\nHector grabs her wrist.\\n\\nAMANDA \\nExcuse me! I gotta go to \\nthe ladies\\' room!\\n\\nEXT. PITT STREET POOL - MOMENTS LATER \\n\\nAmanda gets on line behind Victor as he continues to antagonize \\nCarlos in the \\ndistance. Amanda recognizes Victor from behind, peeking \\nover his shoulder at the side of his face.\\n\\nAMANDA \\nShorty!\\n\\nVictor turns around to Amanda.\\n\\nAMANDA \\nWussup?\\n\\nVICTOR \\nWussup.\\n\\nHe turns back around.\\n\\nAMANDA \\nYo shorty!\\n\\nVictor turns back around.\\n\\nVICTOR \\nWhat?\\n\\nAmanda hears something over her shoulder and spins her head \\naround.\\n\\nAMANDA \\n(to Hector) \\n\\nLeave me alone! \\n(To Victor) \\n\\nYo, I know another \\nbathroom over there, c\\'mon \\nthis line\\'s too long.\\n\\nAmanda takes Victor by the hand and walks towards Hector. \\nShe bumps into him with Victor.\\n\\nAMANDA \\nExcuse us.\\n\\nAmanda gives Hector a snotty grin.\\n\\nShe drags Victor away.\\n\\nEXT. PITT STREET POOL - CONTINUOUS \\n\\nIn a small corner, out of sight to the rest of the kids at the pool. \\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 11/25\\n\\nAmanda \\ncomplains to Victor. Victor looks confused.\\n\\nAMANDA \\nThis fucking guy be \\nfollowin\\' me around, and \\ntouchin\\' me. Asshole!\\n\\nShe sighs and pulls on the bathroom door. It\\'s locked. She \\ngives it another try. It won\\'t budge.\\n\\nAMANDA \\nLook, just do me a favor. \\nStand right here, okay?\\n\\nAmanda takes Victor\\'s hand for balance and squats down, \\npulling her bathing suit bottoms to the side. She urinates. \\nVictor watches her, trying to play it cool. The camera \\ntilts up from Amanda\\'s face peeking up at Victor, to their \\nhands straining for balance, to Victor\\'s wandering eyes.\\n\\nEXT. 8TH STREET BETWEEN AVENUES C AND D - DAY \\n\\nClose-up of ERICA looking into the camera.\\n\\nERICA \\nWe\\'re \"Fantasy\" and This \\nis Shai, Diamond-\\n\\nFRANCESCA \\nAnd I\\'m Melody.\\n\\nWe see the three girls standing in line on the sidewalk.\\n\\nERICA \\nAnd we\\'re gonna sing a \\nsong called \\'Tell me \\nWhat.\\' It was written by \\nmyself, Diamond and Shai \\nand the vocals were \\narranged by us two.\\n\\nFrancesca rolls her eyes.\\n\\nERICA \\nAlso, it was written May \\n24th 1998 at 10:20 p.m. \\nCheck it out.\\n\\nThe girls begin to sing.\\n\\nCarlos stands in front of the singers mocking them. The \\ncamera pans to see Victor approaching Carlos.\\n\\nVICTOR \\nWussup?\\n\\nCARL0S \\nWussup, Victor.\\n\\nVICTOR \\nYo, can I talk to you for \\na minute?\\n\\nCarlos nods his head. Victor leans into Carlos, resting his \\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 12/25\\n\\narm on Carlos\\' shoulder.\\n\\nVICTOR \\nYo, remember from the \\npool, that girl?\\n\\nCARLOS \\nWhich one?\\n\\nVICTOR \\nYou know, Eddie\\'s cousin.\\n\\nCARL0S \\nEddie from Compost?\\n\\nVICTOR \\nEddie from Baruch, the one \\nwho was sittin\\' wit\\' dat \\nlittle girl;\\n\\nCARLOS \\nThe one with the phat ass?\\n\\nVICTOR \\nNo, c\\'mon, stop playin\\'. \\nThe girl that you kissed \\nwhen we got there. 
Where s\\nhe live at?\\n\\nCARL0S \\nWhy don\\'t you ask Eddie?\\n\\nVICTOR \\nYo, Carlos-I\\'m gonna punch \\nyou.\\n\\nCARLOS \\n(Mockingly) \\n\\nI\\'m gonna punch you. What \\nyou want with her anyway? \\nYou in love with her?\\n\\nVICTOR \\nShe lives near Eddie?\\n\\nCARLOS \\nI think she lives down by \\nPitt.\\n\\nVICTOR \\nNear Natasha\\'s? Or over by \\nBoy\\'s Club?\\n\\nCARLOS \\nI think by Twenty-two.\\n\\nVICTOR \\nFor real?\\n\\nCARLOS \\nWhat you want with her \\nanyway\\'\\n\\nVictor starts walking off down the block.\\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 13/25\\n\\nCARLOS \\nYo! What you goin\\' for\\n\\nVICTOR\\n\\'Cause you know what, \\nyou\\'re not supposed to \\nknow but yesterday she \\nlent me her pills for her \\nMoms and if I don\\'t give \\n\\'em to her she\\'s gonna \\ndie. You want her to die?\\n\\nCarlos shrugs Victor off as he walks away down the block.\\n\\nA moment passes.\\n\\nCARLOS \\n(to himself)\\n\\nWhat do you do when your \\nbitch is untrue? \\nYou cut the hooker off and \\nfind someone new. I need \\nanother bitch another \\nbitch in my life.\\n\\nEXT. LOWER EAST SIDE - CONTINUOUS \\n\\nMONTAGE: Victor\\'s trip through the streets in search of Amanda\\'s \\nblock.\\n\\nEXT. AMANDA\\'S BLOCK - LATER \\n\\nDonna and Michelle are standing in front of their building.\\n\\nMICHELLE \\nOkay, merengue, you do \\nlike this-\\n\\nMichelle places one hand on her side, the other in the air \\nand begins to step.\\n\\nDONNA \\nLike this?\\n\\nMICHELLE \\nYeah, that\\'s right, you \\ngot it girl.\\n\\nMichelle grabs Donna, they embrace and dance.\\n\\nMICHELLE \\nNow salsa, you know how to \\ndance salsa?\\n\\nDONNA \\nYeah.\\n\\nMICHELLE \\nOkay, then dance. Show.\\n\\nDonna dances. Michelle looks over her shoulder. Aaron is \\ndrinking a bottle of soda across the street.\\n\\nMICHELLE \\nI don\\'t think he\\'s \\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 14/25\\n\\nwatching.\\n\\nEXT. AMANDA\\'S BLOCK - CONTINUOUS \\n\\nAaron sits on a stoop across the street from Michelle and Donna He\\'s \\nshaking up \\na bottle of soda, then opening the cap to let the bubbles \\nout. He\\'s got a large brown paper bag with him. Victor the rounds the \\ncorner, \\nhis eyes are scanning across the buildings on the block.\\n\\nAARON \\nYo Shorty, you wanna buy \\nsome M-80s?\\n\\nVICTOR \\nNah.\\n\\nAARON\\nTwenty-four for two \\ndollars, son, and ain\\'t \\ntalkin\\' no little pussy \\nboxes, I\\'m talkin\\' big \\nones.\\n\\nVICTOR \\nNah.\\n\\nAARON \\nAlright, I\\'ll be here, if \\nanything.\\n\\nVictor continues down the block.\\n\\nEXT. AMANDA\\'S BLOCK - CONTINUOUS \\n\\nVictor finds Jenette sitting on Amanda\\'s stoop. She appears to have \\njust come \\noutside as she unties a jump rope that is knotted around her \\nwaist Victor stands next to her for a moment waiting awkwardly to \\nspeak.\\n\\nJenette is ignoring him.\\n\\nAaron watches from down the block.\\n\\nVictor steps towards Jenette. As he turns to face her, she \\nis roll her sock down to her ankle and preparing to jump \\n\\nher rope.\\n\\nVICTOR \\nHey, little girl, you know \\na girl named Amanda who \\nlives around here? \\n\\nJENETTE \\nNo.\\n\\nJenette stands sloppily in front of him on the street. She \\nsays nothing and begins to jump. Smic-smac, smic-smac, \\nsmic-smack.\\n\\nVICTOR \\nYou sure? 
She\\'s got kind \\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 15/25\\n\\nof like brown hair.\\n\\nJENETTE \\nPositive.\\n\\nVICTOR \\nYou sure?\\n\\nJENETTE \\nPositive.\\n\\nVICTOR \\nMy friend told me she \\nlived around here.\\n\\nJENETTE \\nYour friend must be \\nmisinformed.\\n\\nVICTOR \\nDidn\\'t I see you at Pitt \\nyesterday?\\n\\nA pause.\\n\\nJENETTE \\nSo what do you want with \\nher anyway?\\n\\nVICTOR \\nI\\'m a good friend of hers.\\n\\nJENETTE \\nHow do I know you\\'re not \\nlying.\\n\\nVICTOR \\nYo, I know what you\\'re \\nthinking, that I\\'m one of \\nthose guys that keep \\ncoming up to her.\\n\\nJENETTE \\nProbably.\\n\\n(Under her breath) \\nOne of the many.\\n\\nVICTOR \\nWhat?\\n\\nJENETTE \\nNothing.\\n\\nHector approaches Victor from down the block.\\n\\nHECTOR \\nExcuse me, can I help you?\\n\\nVictor doesn\\'t answer.\\n\\nHECTOR \\nYou looking for somebody?\\n\\nVICTOR \\nWha\\'?\\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 16/25\\n\\nHECTOR \\nYou here to see somebody?\\n\\nVICTOR \\nYeah.\\n\\nHECTOR \\nWho?\\n\\nVICTOR \\nA girl named Amanda.\\n\\nHECTOR \\nWhat she look like?\\n\\nVICTOR \\nShe\\'s like this high, dark \\nhair, skinny\\n\\nHECTOR \\nYo, that\\'s my girl.\\n\\nVICTOR \\nShe didn\\'t say she had no \\nman.\\n\\nHECTOR \\nI suggest you turn around \\nand go back to where you \\ncame from.\\n\\nVictor looks over to Jenette. No response.\\n\\nHECTOR \\nWhat are you waiting for?\\n\\nA pause.\\n\\nHECTOR \\nYou betta bounce, yo.\\n\\nHector shoves Victor away from the stoop. Victor steps up to \\nHector. Jenette watches them. interested.\\n\\nHECTOR \\nYou betta leave the block, \\nyo, or me and my boys, \\nwe\\'re gonna fuck you up.\\n\\nVictor looks at Hector then walks away down the block.\\n\\nEXT. AMANDA\\'S BLOCK - CONTINUOUS \\n\\nVictor rounds the corner and sits down on the sidewalk.\\n\\nVICTOR \\n(to himself) \\n\\nFuck man. I\\'m gonna get a \\nfuckin\\' M-80 and shove it \\nup his retarded ass.\\n\\nEXT. AMANDA\\'S BLOCK - MOMENTS LATER\\n\\nCHRISTOPHER, an energetic ten-year-old, exits the front door of his \\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 17/25\\n\\nbuilding \\nholding a bat, sits down on the curb and looks out at the block. As \\nthe camera \\npans, we see Aaron on the corner talking to Mari. Michelle and Donna \\nare walking \\ndown the block. Hector is making his way back over to Amanda\\'s \\nbuilding and \\nJenette is jumping rope.\\n\\nChris rubs his eyes, turns around and looks up at one of \\nthe windows in his building.\\n\\nCHRIS \\n(Up to the window) \\n\\nMa!\\n\\nNo answer.\\n\\nCHRIS \\nMa!\\n\\nMom\\'s head sticks out the window.\\n\\nCHRIS \\nCross me!\\n\\nMom waves her hand, signaling to him that it\\'s safe to \\ncross the street. Chris, picking up a half-deflated \\nfootball, runs into the street.\\n\\nChris makes his way down the block, stomping along in big \\nHigh-tops. He spots Aaron a few feet away.\\n\\nEXT. AMANDA\\'S BLOCK - CONTINUOUS \\n\\nAaron is sitting on the sidewalk crushing a soda bottle under his \\nfoot.\\n\\nChris approaches and tosses the ball to him.\\n\\nAaron stands up and tosses the ball back to Chris, then \\nlights a cigarette. Chris waits until Aaron is ready and \\nthrows again.\\n\\nAARON \\nIt\\'s too hot, get outta \\nhere.\\n\\nEXT. 
AMANDA\\'S BLOCK- CONTINUOUS \\n\\nMichelle and Donna are sitting on their stoop.\\n\\nDONNA \\nI want him to change. I \\nwant to get to know the \\nreal him and I want him to \\nget to know the real me.\\n\\nMichelle looks at Donna. A pause.\\n\\nDONNA \\nIt\\'s so frustrating. I ask \\nhim if he\\'s mad and he \\nsays no.\\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 18/25\\n\\nEXT. AMANDA\\'S BLOCK - MOMENTS LATER\\n\\nVictor is still sitting on the sidewalk around the corner. \\n\\nChris walks by him.\\n\\nVICTOR \\nYo! You live here?\\n\\nChris nods.\\n\\nVICTOR \\nYou know Amanda?\\n\\nChris nods again, smiles and throws the ball at Victor. \\nVictor catches it and throws it back. Chris catches it and \\nthen starts to run away.\\n\\nVICTOR \\nHey, where you goin\\'?\\n\\nVictor starts to follow him.\\n\\nVICTOR \\nHold up, yo!\\n\\nEXT. ALLEYWAY - CONTINUOUS \\n\\nChris slips through a fence to enter the alleyway and Victor enters \\nbehind him. \\nA \"No Trespassing\" sign hangs on the gate. Victor looks around a \\nlittle as they continue their game of catch.\\n\\nAs the boys play, the gate creaks and swings open. The boys \\nquickly scurry into an out-of-the-way corner.\\n\\nAaron and Donna enter the alleyway. The boys watch them.\\n\\nAARON \\nAlright, tell me, what\\'d \\nyou hear?\\n\\nDONNA \\nThere\\'s a rumor that you \\nwere tryin\\' to get \\nsomebody to beat me up.\\n\\nAARON \\nWhat chu listening to \\nrumors for? I\\'m not like \\ndat.\\n\\nDONNA \\nIs it true?\\n\\nAaron puts his bag of fireworks down on the floor.\\n\\nAARON \\nI told you, no. I\\'m not \\nthat type.\\n\\nDONNA \\nThen I want you to go to \\nwhoever\\'s sayin\\' that and \\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 19/25\\n\\ntell them to stop.\\n\\nAARON \\nAlright. \\n\\nAaron clears a piece of hair away from Donna\\'s face and \\nputs it behind her ear. A pause He gently pushes her up against the \\nwall.\\n\\nHe kisses her forehead. The camera follows as Aaron\\'s lips \\nmake their way to Donna\\'s. They kiss. Slowly at first, then \\ndeeply.\\n\\nVictor and Chris watch silently from the corner.\\n\\nEXT. AMANDA\\'S BLOCK- MOMENTS LATER\\n\\nJenette is siding on the ground, drawing with chalk on the \\nside walk. Victor approaches her and sits down on the \\nstoop.\\n\\nJENETTE \\nAmanda\\'s not back yet\\n\\nVictor runs his fingers through his hair. Jenette details \\nher artwork. She focuses intently on her drawing\\n\\nJENETTE \\n(With her eyes lowered) \\n\\nHow\\'s Hector?\\n\\nVictor doesn\\'t respond.\\n\\nJENETTE \\n(to Victor) \\n\\nSo, do you like her?\\n\\nJenette stares at the sidewalk.\\n\\nVICTOR \\nNo.\\n\\nJENETTE \\nSo, then whadda ya want?\\n\\nVictor stands up to leave.\\n\\nJENETTE \\nYou wanna do somethin\\' \\nwith me?\\n\\nVICTOR \\nNot really.\\n\\nJENETTE \\nHey!\\n\\nVICTOR \\nWha\\'?\\n\\nJenette makes eye contact. Victor makes his way back over to Jenette. \\nHe sits \\ndown beside her. 
Jenette\\'s eyes focus back on her drawing.\\n\\nJENETTE \\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 20/25\\n\\nWhere you know Amanda \\nfrom?\\n\\nVICTOR \\nJus\\' from around the way.\\n\\nJENETTE \\nYou live around here?\\n\\nVICTOR \\nYeah.\\n\\nJENETTE \\nYou gotta girLfrLend?\\n\\nVictor sees Chris kicking his football across the street.\\n\\nJENETTE \\nYou wanna be my boyfriend?\\n\\nVictor doesn\\'t respond. A moment passes.\\n\\nJENETTE \\nHector\\'s an asshole, huh?\\n\\nJenette looks at Victor. She catches him looking across the \\nstreet.\\n\\nJENETTE \\n(to Victor) \\n\\nI know how ta get him back \\nif you want.\\n\\nVICTOR \\n(turning back) \\n\\nNah. \\n\\nJenette\\'s eyes drop down to the ground.\\n\\nShe quietly begins to sob. She holds her face in her hands. \\nFake tears.\\n\\nVICTOR \\nWhat\\'s the matter? You \\nalright?\\n\\nAaron rounds the corner of the block with Donna. 3enette \\ncatches a glimpse of him and starts sobbing loudly. Aaron \\nsees Jenette crying on the ground. He leaves Donna behind \\nand starts walking towards Jenette.\\n\\nVICTOR \\nWha\\'? I\\'ll do whatever you \\nwant. \\n\\nAaron reaches them. Victor looks up at him.\\n\\nAARON \\n(to Victor) \\n\\nWhat happened!\\n\\nJenette cries. Aaron grabs Victor\\'s arm tightly.\\n\\nAARON \\n(Angrily) \\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 21/25\\n\\n\\'Wha\\' happened?\\n\\nJenette raises her head.\\n\\nJENETTE \\nHector-\\n\\nAARON \\nWhat? He hit you?\\n\\nShe sobs and nods \"yes.\"\\n\\nAaron scoops her up onto his shoulder and grabs Victor by \\nthe arm.\\n\\nAARON \\nC\\'mon.\\n\\nJenette\\'s chalk is left behind on the sidewalk.\\n\\nEXT. AMANDA\\'S BLOCK - CONTINUOUS\\n\\nAaron marches them all up the block. Hector\\'s silhouette \\nis visible in the distance as he cranks the pedal of an upside-down \\nbicycle.\\n\\nJenette bounces and sobs over Aaron\\'s shoulder as they trot \\nup the block. Victor struggles to keep up as his sneakers \\nbegin to skid on the cement.\\n\\nHector sees the three of them approaching.\\n\\nHe raises his arm and points a finger at Victor.\\n\\nHECTOR \\n(to Victor) \\n\\nI thought I told you to go \\nhome!\\n\\nAaron speeds up as he approaches Hector.\\n\\nAARON \\nYou hit my sista?\\n\\nJenette sobs in Aaron\\'s arms as he puts her down. \\nAaron releases Victor\\'s sleeve.\\n\\nSmack! Aaron hits Hector in the face. Hector falls. Aaron \\nturns and finds Victor turning away.\\n\\nAARON \\n(to Victor) \\n\\nYo! Get over here!\\n\\nVictor turns back towards the action. Holding Hector \\nagainst a wall, he pulls Victor near.\\n\\nAARON \\n(to Hector) \\n\\nWho told you to touch my \\nsister?\\n\\nHECTOR \\nI didn\\'t touch shit!\\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 22/25\\n\\nSmack! Hector is pinned against the wall. He tries to free \\nhimself of Aaron but he is no match. Chris watches from the \\ncurb, amused.\\n\\nHECTOR \\nI didn\\'t do anything!\\n\\nHector struggles. Aaron looks to Victor.\\n\\nAARON \\nDid you see him?\\n\\nJenette turns her head to Victor. She wipes the tears from \\nher eyes.\\n\\nVictor looks at Hector. A moment passes. Chris plays with \\nhis bat as he watches.\\n\\nVICTOR \\nYeah.\\n\\nAaron punches Hector in the stomach. 
Hector doubles over.\\n\\nChris throws punches into the air.\\n\\nDISSOLVE TO\\n\\nEXT. AMANDA\\'S BLOCK - LATE AFTERNOON\\n\\nThe sun has dropped low in the sky. Long shadows rest on \\nThe pavement after a steamy afternoon.\\n\\nAmanda\\'s block is quiet and empty.\\n\\nChris strolls by Amanda\\'s stoop.\\n\\nHe notices Jenette\\'s drawing, bends down on his knees and \\nreads her sloppy writing.\\n\\n\"For entrance to secret passage press here.\"\\n\\nChris presses his finger into the circle she\\'s drawn. A \\nmoment passes. Nothing happens.\\n\\nA sound is heard atop Amanda\\'s stoop. Chris quickly walks \\naway. Amanda appears through her front door.\\n\\nShe sits down on her stoop.\\n\\nVictor is sitting on the curb across the street tapping an \\nempty bottle against the pavement. He sees Amanda.\\n\\nVictor approaches Amanda\\'s stoop.\\n\\nVICTOR \\nYo.\\n\\nAMANDA \\nHi.\\n\\nVICTOR \\nRemember me, from the \\npool?\\n\\nAMANDA\\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 23/25\\n\\nUm. Yeah! Shorty!\\n\\nA pause.\\n\\nAMANDA \\nSo watcha doin\\'?\\n\\nVICTOR \\nNothin\\'.\\n\\nAMANDA \\nWhat are you doin\\' here?\\n\\nVICTOR \\nI, umm, came to see you.\\n\\nAMANDA \\nYou know somebody around \\nhere?\\n\\nVICTOR \\nNo. \\n\\n(He sighs) \\nWhat you do today?\\n\\nAMANDA \\nOh you know, cleaned the \\nhouse, cooked. Took care \\nof my little sisters. Sit \\ndown. So where\\'s Carlos?\\n\\nVICTOR \\nI guess he\\'s outside \\nsomeplace I don\\'t like \\ntakin\\' him down to certain \\nplaces.\\n\\nVictor sits down.\\n\\nAMANDA \\nWhadja wanna see me about?\\n\\nVICTOR \\nI just wanted to see you.\\n\\nA pause.\\n\\nAMANDA \\nSo you got a girl?\\n\\nVICTOR \\nOf course.\\n\\nAMANDA \\nSo what\\'s her name?\\n\\nVICTOR \\nYou know. I got a lot, \\nmore than one.\\n\\nAMANDA \\nA play-ya.\\n\\nVICTOR \\nYou got a boyfriend?\\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 24/25\\n\\nAMANDA \\nMe? No. Don\\'t want none \\neither. Such bastards, man.\\n\\nA pause.\\n\\nAMANDA \\n(Quickly) \\n\\nThey play a girl, then you \\ncomplain, then they play \\ndumb, blah, blah, blah. \\nAll that bullshit, \\nwhatever I don\\'t want \\nnone. I\\'m gonna stay \\nsingle awhile, you know?\\n\\nA pause.\\n\\nAMANDA \\nSo wadda you do with your \\ngirls?\\n\\nVICTOR\\nJust chill.\\n\\nAMANDA \\nThat\\'s it?\\n\\nVICTOR \\nNah, we make out and \\nstuff.\\n\\nAmanda doesn\\'t believe him.\\n\\nAMANDA \\nSo what you think of me?\\n\\nVICTOR \\nYou look good.\\n\\nAMANDA \\nI look good, that\\'s it. So \\nwhat else do you do for \\nthese girls?\\n\\nVICTOR \\nI buy them flowers.\\n\\nAMANDA \\nHow you treat them?\\n\\nVICTOR \\nGood. I\\'m faithful to \\nthem.\\n\\nAmanda gets up and walks away. Victor quickly follows.\\n\\nEXT. ALLEYWAY - MOMENTS LATER\\n\\nAmanda walks through the half-open fence and leans flat \\nagainst the wall. Victor stands close by, nervously.\\n\\nHe keeps his distance from her.\\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 25/25\\n\\nAMANDA\\nSee, I got you, you are so \\nscared. I don\\'t believe \\nthat you kissed no girls. 
\\nThat you got three girls \\nand that you faithful and \\nthis and that.\\n\\nVICTOR\\nI did.\\n\\nAMANDA \\nWell, you know I\\'m \\nstandin\\' here and you say \\nI look good?\\n\\nVICTOR \\nI kissed those girls.\\n\\nAMANDA \\nNo you didn\\'t, you ain\\'t \\nprovin\\'it.\\n\\nVICTOR \\nI aint gotta prove nothin\\' \\nto no girl, \\'cause I got \\nit like dat.\\n\\nAMANDA \\nOh, \\'cause you got it like \\ndat?\\n\\nVictor approaches Amanda. He touches her arm. Amanda \\nsmiles.\\n\\nShe takes Victor\\'s hand and places it on her breast. Victor \\nmoves forward. Amanda moves his hand over her breasts. She \\nwraps her arms around his waist. Victor bends his arms \\naround her back.\\n\\nAmanda hisses him on the lips, slowly. A long, deep kiss. \\nAs she kisses him she runs her hand through his hair. She \\npulls back. Victor looks around. Chris is at the entrance \\nof the alleyway, watching them. He is holding his deflated football.\\n\\nChris looks at him for a second and walks away.\\n\\nChris walks down the block, his bat against the pavement.\\n\\nFADE OUT\\n\\n\\n'),\n", + " Document(metadata={'source': 'https://dl.boxcloud.com/api/2.0/internal_files/1169674971571/versions/1274131573171/representations/extracted_text/content/', 'title': 'FIVE_FEET_AND_RISING_by_Peter_Sollett_pdf'}, page_content='\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 1/25\\n\\nFIVE FEET AND RISING\\n\\nby\\n\\nPeter Sollett\\n\\nFADE IN:\\n\\nEXT. 8TH STREET BETWEEN AVENUES C AND D - DAY \\n\\nA group of dark-skinned girls wearing cheerleading outfits \\nalign themselves in formation on the sidewalk. They begin to \\ndance. No music can be heard. The sound of the girls\\' bodies \\nis our soundtrack. We hear their strained breathing, palms \\nand sneaker bottoms pounding while they hum and count softly \\nto themselves in an effort to keep the rhythm.\\n\\nSLO-MO: We explore the bodies of the dancers; their bright \\neyes and sweaty brows, their stomping feet and colliding \\nhands (dark side and light side). The younger girls perform \\nprovocative dance movements, the older girls repeat them.\\n\\nTheir bodies silhouette in the bright sunlight.\\n\\nCUT TO: TITLES\\n\\nEXT. AMANDA\\'S BLOCK - DAY \\n\\nAMANDA, a tall 14-year-old exits the front door of her \\napartment budding with her headphones in one hand and a \\nmagazine in the other. She sits down on her stoop, puts her \\nheadphones on and presses \"play\". We can hear the sound of \\nSalsa leaking out of the sides of her headphones. JENETTE, \\nten years old with big black hair in rubber-band restraints, sits on \\nthe sidewalk below Amanda drawing with a piece of chalk. Jenette \\n\\n looks over her shoulder and sees Amanda reading her magazine Jenette \\nclimbs the stairs and sits down beside her.\\n\\nThe camera pans to reveal AARON, an 18-year-old boy on the \\nother side of the street, unloading some fireworks from the \\ntrunk of a car. He\\'s filling a paper bag with them, \\ncarefully making sure not to reveal what he\\'s doing to \\nonlookers.\\n\\nAt the ear end of the block, DONNA, 14, and MICHELLE, 12, \\nsit and watch Aaron at work.\\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 2/25\\n\\nMICHELLE \\nHow does he look up close?\\n\\nDONNA \\n(Amorously) \\n\\nUmm, he got dark brownish \\neyes, he got a nice nose I \\nlove his nose. I love his \\nskin. 
I love his lips, he \\ngot a great smile and he \\ngot-\\n\\nMICHELLE \\nA bad attitude.\\n\\nDONNA \\nYeah, he got a bad \\nattitude.\\n\\nMICHELLE \\nYou said before, that he \\ngot boxes?\\n\\nSLO-MO: The camera is very close to Aaron. Details of his \\nbody in a shallow depth of field.\\n\\nDONNA \\nYeah, he got boxes in his \\nstomach. He\\'s taller than \\nme.\\n\\nMICHELLE \\nHow old is he?\\n\\nDONNA \\nI think he\\'s 18 or 17.\\n\\nMICHELLE \\nYou gonna talk to him?\\n\\nDONNA \\nUm, yeah I think so.\\n\\nBack on Amanda\\'s stoop.\\n\\nJENETTE \\nYou still like him.\\n\\nAMANDA \\n(With a sigh of negative attitude) \\n\\nNo.\\n\\nHECTOR, a mature-looking 13-year old is crossing the \\nstreet. He enters frame with Amanda and Jenette. \\n\\nHECTOR \\nYo, wuzzup. \\n\\nAmanda ignores him. \\n\\nJENETTE \\nHi Hector\\n\\nHECTOR \\n(To Amanda) \\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 3/25\\n\\nOh, you\\'re not gonna say \\nhello.\\n\\nAMANDA\\'S POV: Donna approaches Aaron as he locks up the \\ntrunk of the car. She hesitantly calls over to him. He \\nacknowledges her with a lift of the chin. Making sure he \\nkeeps his distance from her, he looks around to see if \\nanyone is watching him. He tosses his head for her to \\nfollow. He begins to walk away down the block. She follows.\\n\\nHector is looking at Amanda. He appears to have run out of \\nthings to say. Amanda removes her headphones. Her music \\nbecomes clearer, more audible.\\n\\nHECTOR \\nYo, you gonna keep me \\nhangin\\' like dat?\\n\\nAMANDA \\nHector, Yo try to rap to me every day, why don\\'t you \\ntake your three-quarters retarded ass outta here?\\n\\nHECTOR \\nYo, you betta give me my \\nrespects or I\\'ll tell your \\nlittle girl ova here what \\nI heard about you and my \\nboy.\\n\\nAmanda puts her headphones back on.\\n\\nHECTOR looks like he got a new girl anyway.\\n\\nEXT. 8TH STREET BETWEEN AVENUES C AND D - AFTERNOON \\n\\nVICTOR, a skinny 12-year-old with sloppy hair, is asleep in \\nthe sun on his fire escape. There is sweat beaded up on his \\nbody. His shirt is rolled up behind his head like a pillow. \\nHis breath is heavy, his chest rises and falls. The camera \\ntilts to reveal CARLOS, ten, rounding the corner on the \\nstreet below. The camera tracks backwards as Carlos \\napproaches. He is talking to himself.\\n\\nCARLOS \\n(To himself) \\n\\nWhatcha gonna do when ya \\nbitch is untrue?\\n\\nCarlos lifts his head up to look at the fire escapes.\\n\\nCARLOS \\nYo Victor!\\n\\nThe camera pans and tilts up to the fire escapes. The \\nbuildings float by. He arrives in front of Victor\\'s \\nbuilding and cups his hand around his mouth.\\n\\nCARLOS \\nYo Victor!\\n\\nOn the fire escape, Victor\\'s eyes pop open and the sun \\nshines into them.\\n\\nVICTOR \\n(Dazed) \\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 4/25\\n\\nWhat, wuzzup?\\n\\nVictor sits up and looks over the side of the fire escape.\\n\\nCARL0S \\nCome down!\\n\\nVICTOR \\nI can\\'t!\\n\\nCARLOS \\nWhy?\\n\\nVICTOR \\nI got punished, man.\\n\\nCARLOS \\nFa what?\\n\\nVICTOR \\nI won\\'t let my motha cut \\nmy hair.\\n\\nCARLOS \\nWha\\'?\\n\\nVICTOR \\nShe fucks it all up!\\n\\nCARLOS \\nForget it! 
C\\'mon Let\\'s go \\nto the pool.\\n\\nVICTOR \\nI can\\'t man, I\\'m punished!\\n\\nCARL0S \\nWho gives!\\n\\nVICTOR \\nI can\\'t, I\\'m gonna get \\npunished more!\\n\\nCARLOS \\nTrust me, I always get \\ninto trouble, c\\'mon!\\n\\nVictor sits down on the fire escape. Carlos pauses for a \\nminute and turns his back on Victor.\\n\\nCARL0S \\nC\\'mon! The girls are \\nwaiting for you!\\n\\nVictor hops back up.\\n\\nVICTOR \\n(interested) \\n\\nThey are?\\n\\nCARLOS \\nYeah! Tell me which one you would like. To be doin\\' \\nnothin on a fire escape or beat the pool with a bunch of \\ngirls? Be straight up!\\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 5/25\\n\\nVICTOR \\nI\\'ll be right down.\\n\\nVictor climbs down the fire escape and hops down to the \\nstreet. he immediately grabs Carlos and starts pushing him \\ndown the block to avoid being seen from above.\\n\\nEXT. THE CORNER OF 8TH STREET AND AVENUE D - CONTINUOUS \\n\\nThe boys safely round the corner onto Avenue D. Victor \\nperks up and starts nudging Carlos.\\n\\nVICTOR \\nSo what girls are over \\nthere?\\n\\nCARLOS \\nNatasha, Maria, Tina-\\n\\nVICTOR \\nThese are the pretty girls \\nyou told me to come down \\nfor?\\n\\nVictor sighs and runs his fingers through his hair.\\n\\nCARLOS \\nWhat\\'s the difference, you \\nnever do anything anyway\\n\\nVictor makes a disagreeing gesture. Carlos drags Victor \\nDowntown.\\n\\nVICTOR \\nWhat are you going that \\nway for?\\n\\nCARLOS \\nI\\'m not goin\\' to 10th \\nStreet, people piss and \\nshit in that pool,\\n\\nVICTOR \\nWhere you goin\\'?\\n\\nCARLOS \\nPitt.\\n\\nVICTOR \\nOh man, what we gotta \\nleave ar\\' own neighborhood \\nfor?\\n\\nCARLOS \\nC\\'mon.\\n\\nVICTOR \\nMan, if I go down you\\'re \\ngoin\\' down with me.\\n\\nEXT. AVENUE D - CONTINUOUS \\n\\nMONTAGE: Victor and Carlos hang on each other as they walk down \\n Avenue D towards the Pitt Street Pool. They pass by Victor brother, \\n giving him an impromptu smack and then bang on a store window to wave \\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 6/25\\n\\nhello to a friend.\\n\\nFrom Victor\\'s POV we work our way through a crowd of people \\nan cross 3rd Street. Victor looks back at the crowd with a \\nwatchful eye. The camera tracks along in the street as the \\nboys walk along the sidewalk. Victor looks up at a street \\nsign. It reads, \"Houston St.\" As the boys make their way \\nacross the wide intersection, the heat is slightly visible \\nas car exhaust fills the gridlocked lanes. Victor and \\nCarlos walk calmly, with space between them, making their \\nway towards the camera in a shallow depth of field as we \\nfollow focus on them.\\n\\nEXT. PITT STREET POOL - CONTINUOUS \\n\\nVictor and Carlos stand on line outside the pool gates. Police \\nexamine the boys as they slowly inch their way into the park.\\n\\nFrom Victor\\'s POV we see the expanse of the pool as he \\nenters the park. We watch as he surveys the area.\\n\\nFrom a high angle we see Carlos nudge Victor to make his \\nway onto the pool deck. They enter the crowd, proceeding \\ncarefully, making sure not to bump anyone.\\n\\nAs they continue to walk, Victor\\'s POV reveals the bodies \\nof older boys and girls, rough water and mischievous kids.\\n\\nEXT. PITT STREET POOL - CONTINUOUS \\n\\nAmanda is sitting poolside with Jenette.\\n\\nAMANDA \\nAnd that girl, over there? 
\\nHoochie.\\n\\nJenette looks out across the pool trying to see who Amanda \\nis talking about.\\n\\nAMANDA \\nAnd him. Stay away from \\nhim, he\\'s only interested \\nin that.\\n\\nAmanda points between Jenette\\'s legs.\\n\\nAMANDA \\nThat right there.\\n\\nCarlos steps in front of them, Amanda smacks his leg.\\n\\nCARLOS \\nYo Amanda, wassup?\\n\\nThey kiss on the cheek.\\n\\nCARLOS \\n(To Victor) \\n\\nAmanda is Eddie\\'s cousin.\\n\\nVICTOR \\nEddie from Compost?\\n\\nCARLOS \\nNo, Baruch.\\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 7/25\\n\\nA girl swimming in the pool calls over to Carlos.\\n\\nGIRL 1 \\nCarlos! Carlos, get your \\nskinny ass over here!\\n\\nCARLOS \\n(To Victor) \\n\\nStay right here, I\\'ll be \\nright back.\\n\\nCarlos walks off leaving Victor standing next to Amanda and \\nJenette. Victor looks uncomfortable.\\n\\nAMANDA \\nWho are you?\\n\\nVICTOR \\nI\\'m wit\\' Carlos.\\n\\nAmanda points out across the pool.\\n\\nAMANDA \\n(To Jenette) \\n\\nHim right there, That\\'s \\nwho I\\'m talkin\\' about. \\n\\n(to Victor) \\nExcuse me, can you move, I \\ncan\\'t see.\\n\\nAmanda spots Aaron and Donna in the distance.\\n\\nAMANDA \\nDo you have a name?\\n\\nVICTOR \\nVictor.\\n\\nAMANDA \\nWhat?\\n\\nVICTOR \\nVictor.\\n\\nAmanda turns to Jenette and continues talking to her.\\n\\nVICTOR \\nUmm, I\\'m gonna go find \\nCarlos.\\n\\nAs Victor turns to walk, the camera pans to follow him, \\nrevealing Hector who is approaching Amanda. The camera then \\npans back to Amanda. She sighs and turns her ahead away \\nfrom him.\\n\\nEXT. PITT STREET POOL - CONTINUOUS \\n\\nIn the playground area at the Pitt Street Pool, Aaron is doing a\\' \\nimpression of Al Pacino. Darrell and Boy 1 look on.\\n\\nAARON \\n(to Boy 1) \\n\\nYou wanna meet my little \\nfriend? \\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 8/25\\n\\nBoy 1 is laughing at Aaron. Donna and Michelle stand nearby \\nwatching.\\n\\nAARON \\nDon\\'t fuck wit\\' me! Don\\'t \\nfuck wit\\' me.\\n\\n(pointing his finger)\\nMy lawyer\\'s so good he\\'ll \\nhave you workin in Alaska, \\nso dress warn.\\n\\nDONNA \\nAaron, how you doin\\'?\\n\\nAARON \\nFine.\\n\\nDONNA \\nLook at me.\\n\\nAARON \\nWhat?\\n\\nDONNA \\nWhy you have an attitude \\nfor?\\n\\nAARON \\nNot now, I\\'m busy\\n\\nDONNA \\nGod, I just wanna speak to \\nyou. I just wanna speak \\nto you the way I feel \\nabout you.\\n\\nAARON \\nHurry up, you\\'re wastin\\' \\nmy time, what the fuck.\\n\\nAaron turns back to his friends.\\n\\nDONNA \\nPlease don\\'t scream at me. \\nI like you, but I don\\'t \\nlike the way your attitude \\nis.\\n\\nAARON \\nSo get the fuck outta \\nhere.\\n\\nDarrell and Boy 1 approve. They wait for Donna\\'s reply.\\n\\nDONNA \\nI wanna go out with you, I \\nwant to be part of your \\nlife. I want you to treat \\nme the way a girlfriend \\nshould be treated.\\n\\nAARON \\nThen don\\'t go out with me.\\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 9/25\\n\\nDONNA \\nFor once in your life have \\nsome respect for me, \\ndon\\'t even curse at me or \\nnothin\\'.\\n\\nAARON \\n(to Darrell) \\n\\nNow she\\'s tellin\\' me what \\nthe fuck to do.\\n\\nDONNA \\nGod, you drive me crazy. 
I \\njust want you to know how \\nI feel and you don\\'t \\nunderstand.\\n\\nAARON \\nJust get the fuck outta \\nhere.\\n\\nDonna stares at Aaron as he turns back to his friends. \\nMichelle walks up to Donna and gently leads her away.\\n\\nAARON \\nThat girl be trippin\\'. \\n\\n(Back into his Pacino impression) \\nOne time I let her kiss my \\nrings and forever \\nshe tries to repay me!\\n\\nEXT. PITT STREET POOL - LATER \\n\\nVictor and Carlos are playing, trying to force each others\\' heads \\nunderwater. \\nCarlos squirts water through his lips.\\n\\nVICTOR \\nI gotta go take a piss.\\n\\nCARLOS \\nIf we were at 10th Street \\nPool you woulda done it \\nright in the water, right?\\n\\nThe camera pans as Victor climbs out of the pool and onto a \\nlong line. As he stands and waits, Amanda can be seen in \\nthe background arguing with Hector.\\n\\nIn the water, Carlos makes a face at Victor. Victor makes \\none back.\\n\\nVICTOR \\n(Under his breath to Carlos) \\n\\nI\\'m gonna beat you.\\n\\nEXT. PITT STREET POOL - CONTINUOUS \\n\\nHector and Amanda have been arguing. Jenette is sitting on the ground \\nbeneath \\nthem.\\n\\nHECTOR \\nI know you likes me.\\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 10/25\\n\\nAMANDA \\nI eave me alone!\\n\\nHECTOR \\nWhy don\\'t you share the \\nwealth a little bit?\\n\\nHector grabs her wrist.\\n\\nAMANDA \\nExcuse me! I gotta go to \\nthe ladies\\' room!\\n\\nEXT. PITT STREET POOL - MOMENTS LATER \\n\\nAmanda gets on line behind Victor as he continues to antagonize \\nCarlos in the \\ndistance. Amanda recognizes Victor from behind, peeking \\nover his shoulder at the side of his face.\\n\\nAMANDA \\nShorty!\\n\\nVictor turns around to Amanda.\\n\\nAMANDA \\nWussup?\\n\\nVICTOR \\nWussup.\\n\\nHe turns back around.\\n\\nAMANDA \\nYo shorty!\\n\\nVictor turns back around.\\n\\nVICTOR \\nWhat?\\n\\nAmanda hears something over her shoulder and spins her head \\naround.\\n\\nAMANDA \\n(to Hector) \\n\\nLeave me alone! \\n(To Victor) \\n\\nYo, I know another \\nbathroom over there, c\\'mon \\nthis line\\'s too long.\\n\\nAmanda takes Victor by the hand and walks towards Hector. \\nShe bumps into him with Victor.\\n\\nAMANDA \\nExcuse us.\\n\\nAmanda gives Hector a snotty grin.\\n\\nShe drags Victor away.\\n\\nEXT. PITT STREET POOL - CONTINUOUS \\n\\nIn a small corner, out of sight to the rest of the kids at the pool. \\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 11/25\\n\\nAmanda \\ncomplains to Victor. Victor looks confused.\\n\\nAMANDA \\nThis fucking guy be \\nfollowin\\' me around, and \\ntouchin\\' me. Asshole!\\n\\nShe sighs and pulls on the bathroom door. It\\'s locked. She \\ngives it another try. It won\\'t budge.\\n\\nAMANDA \\nLook, just do me a favor. \\nStand right here, okay?\\n\\nAmanda takes Victor\\'s hand for balance and squats down, \\npulling her bathing suit bottoms to the side. She urinates. \\nVictor watches her, trying to play it cool. The camera \\ntilts up from Amanda\\'s face peeking up at Victor, to their \\nhands straining for balance, to Victor\\'s wandering eyes.\\n\\nEXT. 
8TH STREET BETWEEN AVENUES C AND D - DAY \\n\\nClose-up of ERICA looking into the camera.\\n\\nERICA \\nWe\\'re \"Fantasy\" and This \\nis Shai, Diamond-\\n\\nFRANCESCA \\nAnd I\\'m Melody.\\n\\nWe see the three girls standing in line on the sidewalk.\\n\\nERICA \\nAnd we\\'re gonna sing a \\nsong called \\'Tell me \\nWhat.\\' It was written by \\nmyself, Diamond and Shai \\nand the vocals were \\narranged by us two.\\n\\nFrancesca rolls her eyes.\\n\\nERICA \\nAlso, it was written May \\n24th 1998 at 10:20 p.m. \\nCheck it out.\\n\\nThe girls begin to sing.\\n\\nCarlos stands in front of the singers mocking them. The \\ncamera pans to see Victor approaching Carlos.\\n\\nVICTOR \\nWussup?\\n\\nCARL0S \\nWussup, Victor.\\n\\nVICTOR \\nYo, can I talk to you for \\na minute?\\n\\nCarlos nods his head. Victor leans into Carlos, resting his \\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 12/25\\n\\narm on Carlos\\' shoulder.\\n\\nVICTOR \\nYo, remember from the \\npool, that girl?\\n\\nCARLOS \\nWhich one?\\n\\nVICTOR \\nYou know, Eddie\\'s cousin.\\n\\nCARL0S \\nEddie from Compost?\\n\\nVICTOR \\nEddie from Baruch, the one \\nwho was sittin\\' wit\\' dat \\nlittle girl;\\n\\nCARLOS \\nThe one with the phat ass?\\n\\nVICTOR \\nNo, c\\'mon, stop playin\\'. \\nThe girl that you kissed \\nwhen we got there. Where s\\nhe live at?\\n\\nCARL0S \\nWhy don\\'t you ask Eddie?\\n\\nVICTOR \\nYo, Carlos-I\\'m gonna punch \\nyou.\\n\\nCARLOS \\n(Mockingly) \\n\\nI\\'m gonna punch you. What \\nyou want with her anyway? \\nYou in love with her?\\n\\nVICTOR \\nShe lives near Eddie?\\n\\nCARLOS \\nI think she lives down by \\nPitt.\\n\\nVICTOR \\nNear Natasha\\'s? Or over by \\nBoy\\'s Club?\\n\\nCARLOS \\nI think by Twenty-two.\\n\\nVICTOR \\nFor real?\\n\\nCARLOS \\nWhat you want with her \\nanyway\\'\\n\\nVictor starts walking off down the block.\\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 13/25\\n\\nCARLOS \\nYo! What you goin\\' for\\n\\nVICTOR\\n\\'Cause you know what, \\nyou\\'re not supposed to \\nknow but yesterday she \\nlent me her pills for her \\nMoms and if I don\\'t give \\n\\'em to her she\\'s gonna \\ndie. You want her to die?\\n\\nCarlos shrugs Victor off as he walks away down the block.\\n\\nA moment passes.\\n\\nCARLOS \\n(to himself)\\n\\nWhat do you do when your \\nbitch is untrue? \\nYou cut the hooker off and \\nfind someone new. I need \\nanother bitch another \\nbitch in my life.\\n\\nEXT. LOWER EAST SIDE - CONTINUOUS \\n\\nMONTAGE: Victor\\'s trip through the streets in search of Amanda\\'s \\nblock.\\n\\nEXT. AMANDA\\'S BLOCK - LATER \\n\\nDonna and Michelle are standing in front of their building.\\n\\nMICHELLE \\nOkay, merengue, you do \\nlike this-\\n\\nMichelle places one hand on her side, the other in the air \\nand begins to step.\\n\\nDONNA \\nLike this?\\n\\nMICHELLE \\nYeah, that\\'s right, you \\ngot it girl.\\n\\nMichelle grabs Donna, they embrace and dance.\\n\\nMICHELLE \\nNow salsa, you know how to \\ndance salsa?\\n\\nDONNA \\nYeah.\\n\\nMICHELLE \\nOkay, then dance. Show.\\n\\nDonna dances. Michelle looks over her shoulder. Aaron is \\ndrinking a bottle of soda across the street.\\n\\nMICHELLE \\nI don\\'t think he\\'s \\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 14/25\\n\\nwatching.\\n\\nEXT. 
AMANDA\\'S BLOCK - CONTINUOUS \\n\\nAaron sits on a stoop across the street from Michelle and Donna He\\'s \\nshaking up \\na bottle of soda, then opening the cap to let the bubbles \\nout. He\\'s got a large brown paper bag with him. Victor the rounds the \\ncorner, \\nhis eyes are scanning across the buildings on the block.\\n\\nAARON \\nYo Shorty, you wanna buy \\nsome M-80s?\\n\\nVICTOR \\nNah.\\n\\nAARON\\nTwenty-four for two \\ndollars, son, and ain\\'t \\ntalkin\\' no little pussy \\nboxes, I\\'m talkin\\' big \\nones.\\n\\nVICTOR \\nNah.\\n\\nAARON \\nAlright, I\\'ll be here, if \\nanything.\\n\\nVictor continues down the block.\\n\\nEXT. AMANDA\\'S BLOCK - CONTINUOUS \\n\\nVictor finds Jenette sitting on Amanda\\'s stoop. She appears to have \\njust come \\noutside as she unties a jump rope that is knotted around her \\nwaist Victor stands next to her for a moment waiting awkwardly to \\nspeak.\\n\\nJenette is ignoring him.\\n\\nAaron watches from down the block.\\n\\nVictor steps towards Jenette. As he turns to face her, she \\nis roll her sock down to her ankle and preparing to jump \\n\\nher rope.\\n\\nVICTOR \\nHey, little girl, you know \\na girl named Amanda who \\nlives around here? \\n\\nJENETTE \\nNo.\\n\\nJenette stands sloppily in front of him on the street. She \\nsays nothing and begins to jump. Smic-smac, smic-smac, \\nsmic-smack.\\n\\nVICTOR \\nYou sure? She\\'s got kind \\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 15/25\\n\\nof like brown hair.\\n\\nJENETTE \\nPositive.\\n\\nVICTOR \\nYou sure?\\n\\nJENETTE \\nPositive.\\n\\nVICTOR \\nMy friend told me she \\nlived around here.\\n\\nJENETTE \\nYour friend must be \\nmisinformed.\\n\\nVICTOR \\nDidn\\'t I see you at Pitt \\nyesterday?\\n\\nA pause.\\n\\nJENETTE \\nSo what do you want with \\nher anyway?\\n\\nVICTOR \\nI\\'m a good friend of hers.\\n\\nJENETTE \\nHow do I know you\\'re not \\nlying.\\n\\nVICTOR \\nYo, I know what you\\'re \\nthinking, that I\\'m one of \\nthose guys that keep \\ncoming up to her.\\n\\nJENETTE \\nProbably.\\n\\n(Under her breath) \\nOne of the many.\\n\\nVICTOR \\nWhat?\\n\\nJENETTE \\nNothing.\\n\\nHector approaches Victor from down the block.\\n\\nHECTOR \\nExcuse me, can I help you?\\n\\nVictor doesn\\'t answer.\\n\\nHECTOR \\nYou looking for somebody?\\n\\nVICTOR \\nWha\\'?\\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 16/25\\n\\nHECTOR \\nYou here to see somebody?\\n\\nVICTOR \\nYeah.\\n\\nHECTOR \\nWho?\\n\\nVICTOR \\nA girl named Amanda.\\n\\nHECTOR \\nWhat she look like?\\n\\nVICTOR \\nShe\\'s like this high, dark \\nhair, skinny\\n\\nHECTOR \\nYo, that\\'s my girl.\\n\\nVICTOR \\nShe didn\\'t say she had no \\nman.\\n\\nHECTOR \\nI suggest you turn around \\nand go back to where you \\ncame from.\\n\\nVictor looks over to Jenette. No response.\\n\\nHECTOR \\nWhat are you waiting for?\\n\\nA pause.\\n\\nHECTOR \\nYou betta bounce, yo.\\n\\nHector shoves Victor away from the stoop. Victor steps up to \\nHector. Jenette watches them. interested.\\n\\nHECTOR \\nYou betta leave the block, \\nyo, or me and my boys, \\nwe\\'re gonna fuck you up.\\n\\nVictor looks at Hector then walks away down the block.\\n\\nEXT. AMANDA\\'S BLOCK - CONTINUOUS \\n\\nVictor rounds the corner and sits down on the sidewalk.\\n\\nVICTOR \\n(to himself) \\n\\nFuck man. I\\'m gonna get a \\nfuckin\\' M-80 and shove it \\nup his retarded ass.\\n\\nEXT. 
AMANDA\\'S BLOCK - MOMENTS LATER\\n\\nCHRISTOPHER, an energetic ten-year-old, exits the front door of his \\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 17/25\\n\\nbuilding \\nholding a bat, sits down on the curb and looks out at the block. As \\nthe camera \\npans, we see Aaron on the corner talking to Mari. Michelle and Donna \\nare walking \\ndown the block. Hector is making his way back over to Amanda\\'s \\nbuilding and \\nJenette is jumping rope.\\n\\nChris rubs his eyes, turns around and looks up at one of \\nthe windows in his building.\\n\\nCHRIS \\n(Up to the window) \\n\\nMa!\\n\\nNo answer.\\n\\nCHRIS \\nMa!\\n\\nMom\\'s head sticks out the window.\\n\\nCHRIS \\nCross me!\\n\\nMom waves her hand, signaling to him that it\\'s safe to \\ncross the street. Chris, picking up a half-deflated \\nfootball, runs into the street.\\n\\nChris makes his way down the block, stomping along in big \\nHigh-tops. He spots Aaron a few feet away.\\n\\nEXT. AMANDA\\'S BLOCK - CONTINUOUS \\n\\nAaron is sitting on the sidewalk crushing a soda bottle under his \\nfoot.\\n\\nChris approaches and tosses the ball to him.\\n\\nAaron stands up and tosses the ball back to Chris, then \\nlights a cigarette. Chris waits until Aaron is ready and \\nthrows again.\\n\\nAARON \\nIt\\'s too hot, get outta \\nhere.\\n\\nEXT. AMANDA\\'S BLOCK- CONTINUOUS \\n\\nMichelle and Donna are sitting on their stoop.\\n\\nDONNA \\nI want him to change. I \\nwant to get to know the \\nreal him and I want him to \\nget to know the real me.\\n\\nMichelle looks at Donna. A pause.\\n\\nDONNA \\nIt\\'s so frustrating. I ask \\nhim if he\\'s mad and he \\nsays no.\\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 18/25\\n\\nEXT. AMANDA\\'S BLOCK - MOMENTS LATER\\n\\nVictor is still sitting on the sidewalk around the corner. \\n\\nChris walks by him.\\n\\nVICTOR \\nYo! You live here?\\n\\nChris nods.\\n\\nVICTOR \\nYou know Amanda?\\n\\nChris nods again, smiles and throws the ball at Victor. \\nVictor catches it and throws it back. Chris catches it and \\nthen starts to run away.\\n\\nVICTOR \\nHey, where you goin\\'?\\n\\nVictor starts to follow him.\\n\\nVICTOR \\nHold up, yo!\\n\\nEXT. ALLEYWAY - CONTINUOUS \\n\\nChris slips through a fence to enter the alleyway and Victor enters \\nbehind him. \\nA \"No Trespassing\" sign hangs on the gate. Victor looks around a \\nlittle as they continue their game of catch.\\n\\nAs the boys play, the gate creaks and swings open. The boys \\nquickly scurry into an out-of-the-way corner.\\n\\nAaron and Donna enter the alleyway. The boys watch them.\\n\\nAARON \\nAlright, tell me, what\\'d \\nyou hear?\\n\\nDONNA \\nThere\\'s a rumor that you \\nwere tryin\\' to get \\nsomebody to beat me up.\\n\\nAARON \\nWhat chu listening to \\nrumors for? I\\'m not like \\ndat.\\n\\nDONNA \\nIs it true?\\n\\nAaron puts his bag of fireworks down on the floor.\\n\\nAARON \\nI told you, no. I\\'m not \\nthat type.\\n\\nDONNA \\nThen I want you to go to \\nwhoever\\'s sayin\\' that and \\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 19/25\\n\\ntell them to stop.\\n\\nAARON \\nAlright. \\n\\nAaron clears a piece of hair away from Donna\\'s face and \\nputs it behind her ear. A pause He gently pushes her up against the \\nwall.\\n\\nHe kisses her forehead. 
The camera follows as Aaron\\'s lips \\nmake their way to Donna\\'s. They kiss. Slowly at first, then \\ndeeply.\\n\\nVictor and Chris watch silently from the corner.\\n\\nEXT. AMANDA\\'S BLOCK- MOMENTS LATER\\n\\nJenette is siding on the ground, drawing with chalk on the \\nside walk. Victor approaches her and sits down on the \\nstoop.\\n\\nJENETTE \\nAmanda\\'s not back yet\\n\\nVictor runs his fingers through his hair. Jenette details \\nher artwork. She focuses intently on her drawing\\n\\nJENETTE \\n(With her eyes lowered) \\n\\nHow\\'s Hector?\\n\\nVictor doesn\\'t respond.\\n\\nJENETTE \\n(to Victor) \\n\\nSo, do you like her?\\n\\nJenette stares at the sidewalk.\\n\\nVICTOR \\nNo.\\n\\nJENETTE \\nSo, then whadda ya want?\\n\\nVictor stands up to leave.\\n\\nJENETTE \\nYou wanna do somethin\\' \\nwith me?\\n\\nVICTOR \\nNot really.\\n\\nJENETTE \\nHey!\\n\\nVICTOR \\nWha\\'?\\n\\nJenette makes eye contact. Victor makes his way back over to Jenette. \\nHe sits \\ndown beside her. Jenette\\'s eyes focus back on her drawing.\\n\\nJENETTE \\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 20/25\\n\\nWhere you know Amanda \\nfrom?\\n\\nVICTOR \\nJus\\' from around the way.\\n\\nJENETTE \\nYou live around here?\\n\\nVICTOR \\nYeah.\\n\\nJENETTE \\nYou gotta girLfrLend?\\n\\nVictor sees Chris kicking his football across the street.\\n\\nJENETTE \\nYou wanna be my boyfriend?\\n\\nVictor doesn\\'t respond. A moment passes.\\n\\nJENETTE \\nHector\\'s an asshole, huh?\\n\\nJenette looks at Victor. She catches him looking across the \\nstreet.\\n\\nJENETTE \\n(to Victor) \\n\\nI know how ta get him back \\nif you want.\\n\\nVICTOR \\n(turning back) \\n\\nNah. \\n\\nJenette\\'s eyes drop down to the ground.\\n\\nShe quietly begins to sob. She holds her face in her hands. \\nFake tears.\\n\\nVICTOR \\nWhat\\'s the matter? You \\nalright?\\n\\nAaron rounds the corner of the block with Donna. 3enette \\ncatches a glimpse of him and starts sobbing loudly. Aaron \\nsees Jenette crying on the ground. He leaves Donna behind \\nand starts walking towards Jenette.\\n\\nVICTOR \\nWha\\'? I\\'ll do whatever you \\nwant. \\n\\nAaron reaches them. Victor looks up at him.\\n\\nAARON \\n(to Victor) \\n\\nWhat happened!\\n\\nJenette cries. Aaron grabs Victor\\'s arm tightly.\\n\\nAARON \\n(Angrily) \\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 21/25\\n\\n\\'Wha\\' happened?\\n\\nJenette raises her head.\\n\\nJENETTE \\nHector-\\n\\nAARON \\nWhat? He hit you?\\n\\nShe sobs and nods \"yes.\"\\n\\nAaron scoops her up onto his shoulder and grabs Victor by \\nthe arm.\\n\\nAARON \\nC\\'mon.\\n\\nJenette\\'s chalk is left behind on the sidewalk.\\n\\nEXT. AMANDA\\'S BLOCK - CONTINUOUS\\n\\nAaron marches them all up the block. Hector\\'s silhouette \\nis visible in the distance as he cranks the pedal of an upside-down \\nbicycle.\\n\\nJenette bounces and sobs over Aaron\\'s shoulder as they trot \\nup the block. Victor struggles to keep up as his sneakers \\nbegin to skid on the cement.\\n\\nHector sees the three of them approaching.\\n\\nHe raises his arm and points a finger at Victor.\\n\\nHECTOR \\n(to Victor) \\n\\nI thought I told you to go \\nhome!\\n\\nAaron speeds up as he approaches Hector.\\n\\nAARON \\nYou hit my sista?\\n\\nJenette sobs in Aaron\\'s arms as he puts her down. \\nAaron releases Victor\\'s sleeve.\\n\\nSmack! Aaron hits Hector in the face. 
Hector falls. Aaron \\nturns and finds Victor turning away.\\n\\nAARON \\n(to Victor) \\n\\nYo! Get over here!\\n\\nVictor turns back towards the action. Holding Hector \\nagainst a wall, he pulls Victor near.\\n\\nAARON \\n(to Hector) \\n\\nWho told you to touch my \\nsister?\\n\\nHECTOR \\nI didn\\'t touch shit!\\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 22/25\\n\\nSmack! Hector is pinned against the wall. He tries to free \\nhimself of Aaron but he is no match. Chris watches from the \\ncurb, amused.\\n\\nHECTOR \\nI didn\\'t do anything!\\n\\nHector struggles. Aaron looks to Victor.\\n\\nAARON \\nDid you see him?\\n\\nJenette turns her head to Victor. She wipes the tears from \\nher eyes.\\n\\nVictor looks at Hector. A moment passes. Chris plays with \\nhis bat as he watches.\\n\\nVICTOR \\nYeah.\\n\\nAaron punches Hector in the stomach. Hector doubles over.\\n\\nChris throws punches into the air.\\n\\nDISSOLVE TO\\n\\nEXT. AMANDA\\'S BLOCK - LATE AFTERNOON\\n\\nThe sun has dropped low in the sky. Long shadows rest on \\nThe pavement after a steamy afternoon.\\n\\nAmanda\\'s block is quiet and empty.\\n\\nChris strolls by Amanda\\'s stoop.\\n\\nHe notices Jenette\\'s drawing, bends down on his knees and \\nreads her sloppy writing.\\n\\n\"For entrance to secret passage press here.\"\\n\\nChris presses his finger into the circle she\\'s drawn. A \\nmoment passes. Nothing happens.\\n\\nA sound is heard atop Amanda\\'s stoop. Chris quickly walks \\naway. Amanda appears through her front door.\\n\\nShe sits down on her stoop.\\n\\nVictor is sitting on the curb across the street tapping an \\nempty bottle against the pavement. He sees Amanda.\\n\\nVictor approaches Amanda\\'s stoop.\\n\\nVICTOR \\nYo.\\n\\nAMANDA \\nHi.\\n\\nVICTOR \\nRemember me, from the \\npool?\\n\\nAMANDA\\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 23/25\\n\\nUm. Yeah! Shorty!\\n\\nA pause.\\n\\nAMANDA \\nSo watcha doin\\'?\\n\\nVICTOR \\nNothin\\'.\\n\\nAMANDA \\nWhat are you doin\\' here?\\n\\nVICTOR \\nI, umm, came to see you.\\n\\nAMANDA \\nYou know somebody around \\nhere?\\n\\nVICTOR \\nNo. \\n\\n(He sighs) \\nWhat you do today?\\n\\nAMANDA \\nOh you know, cleaned the \\nhouse, cooked. Took care \\nof my little sisters. Sit \\ndown. So where\\'s Carlos?\\n\\nVICTOR \\nI guess he\\'s outside \\nsomeplace I don\\'t like \\ntakin\\' him down to certain \\nplaces.\\n\\nVictor sits down.\\n\\nAMANDA \\nWhadja wanna see me about?\\n\\nVICTOR \\nI just wanted to see you.\\n\\nA pause.\\n\\nAMANDA \\nSo you got a girl?\\n\\nVICTOR \\nOf course.\\n\\nAMANDA \\nSo what\\'s her name?\\n\\nVICTOR \\nYou know. I got a lot, \\nmore than one.\\n\\nAMANDA \\nA play-ya.\\n\\nVICTOR \\nYou got a boyfriend?\\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 24/25\\n\\nAMANDA \\nMe? No. Don\\'t want none \\neither. Such bastards, man.\\n\\nA pause.\\n\\nAMANDA \\n(Quickly) \\n\\nThey play a girl, then you \\ncomplain, then they play \\ndumb, blah, blah, blah. \\nAll that bullshit, \\nwhatever I don\\'t want \\nnone. 
I\\'m gonna stay \\nsingle awhile, you know?\\n\\nA pause.\\n\\nAMANDA \\nSo wadda you do with your \\ngirls?\\n\\nVICTOR\\nJust chill.\\n\\nAMANDA \\nThat\\'s it?\\n\\nVICTOR \\nNah, we make out and \\nstuff.\\n\\nAmanda doesn\\'t believe him.\\n\\nAMANDA \\nSo what you think of me?\\n\\nVICTOR \\nYou look good.\\n\\nAMANDA \\nI look good, that\\'s it. So \\nwhat else do you do for \\nthese girls?\\n\\nVICTOR \\nI buy them flowers.\\n\\nAMANDA \\nHow you treat them?\\n\\nVICTOR \\nGood. I\\'m faithful to \\nthem.\\n\\nAmanda gets up and walks away. Victor quickly follows.\\n\\nEXT. ALLEYWAY - MOMENTS LATER\\n\\nAmanda walks through the half-open fence and leans flat \\nagainst the wall. Victor stands close by, nervously.\\n\\nHe keeps his distance from her.\\n\\n\\n\\n3/20/23, 5:31 PM FIVE FEET AND RISING by Peter Sollett\\n\\nwww.dailyscript.com/scripts/fivefeetandrising.html 25/25\\n\\nAMANDA\\nSee, I got you, you are so \\nscared. I don\\'t believe \\nthat you kissed no girls. \\nThat you got three girls \\nand that you faithful and \\nthis and that.\\n\\nVICTOR\\nI did.\\n\\nAMANDA \\nWell, you know I\\'m \\nstandin\\' here and you say \\nI look good?\\n\\nVICTOR \\nI kissed those girls.\\n\\nAMANDA \\nNo you didn\\'t, you ain\\'t \\nprovin\\'it.\\n\\nVICTOR \\nI aint gotta prove nothin\\' \\nto no girl, \\'cause I got \\nit like dat.\\n\\nAMANDA \\nOh, \\'cause you got it like \\ndat?\\n\\nVictor approaches Amanda. He touches her arm. Amanda \\nsmiles.\\n\\nShe takes Victor\\'s hand and places it on her breast. Victor \\nmoves forward. Amanda moves his hand over her breasts. She \\nwraps her arms around his waist. Victor bends his arms \\naround her back.\\n\\nAmanda hisses him on the lips, slowly. A long, deep kiss. \\nAs she kisses him she runs her hand through his hair. She \\npulls back. Victor looks around. Chris is at the entrance \\nof the alleyway, watching them. 
He is holding his deflated football.\\n\\nChris looks at him for a second and walks away.\\n\\nChris walks down the block, his bat against the pavement.\\n\\nFADE OUT\\n\\n\\n')]" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "query = \"victor\"\n", + "\n", + "retriever.invoke(query)" + ] + }, + { + "cell_type": "markdown", + "id": "dfe8aad4-8626-4330-98a9-7ea1ca5d2e0e", + "metadata": {}, + "source": [ + "## Use within a chain\n", + "\n", + "Like other retrievers, BoxRetriever can be incorporated into LLM applications via [chains](/docs/how_to/sequence/).\n", + "\n", + "We will need a LLM or chat model:\n", + "\n", + "```{=mdx}\n", + "import ChatModelTabs from \"@theme/ChatModelTabs\";\n", + "\n", + "\n", + "```" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "25b647a3-f8f2-4541-a289-7a241e43f9df", + "metadata": {}, + "outputs": [], + "source": [ + "# | output: false\n", + "# | echo: false\n", + "\n", + "from langchain_openai import ChatOpenAI\n", + "\n", + "llm = ChatOpenAI(model=\"gpt-3.5-turbo-0125\", temperature=0)" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "23e11cc9-abd6-4855-a7eb-799f45ca01ae", + "metadata": {}, + "outputs": [], + "source": [ + "from langchain_core.output_parsers import StrOutputParser\n", + "from langchain_core.prompts import ChatPromptTemplate\n", + "from langchain_core.runnables import RunnablePassthrough\n", + "\n", + "retriever = BoxRetriever(box_developer_token=box_developer_token, character_limit=10000)\n", + "\n", + "context = (\n", + " \"You are an actor reading scripts to learn about your role in an upcoming movie.\"\n", + ")\n", + "question = \"describe the character Victor\"\n", + "\n", + "prompt = ChatPromptTemplate.from_template(\n", + " \"\"\"Answer the question based only on the context provided.\n", + "\n", + " Context: {context}\n", + "\n", + " Question: {question}\"\"\"\n", + ")\n", + "\n", + "\n", + "def format_docs(docs):\n", + " return \"\\n\\n\".join(doc.page_content for doc in docs)\n", + "\n", + "\n", + "chain = (\n", + " {\"context\": retriever | format_docs, \"question\": RunnablePassthrough()}\n", + " | prompt\n", + " | llm\n", + " | StrOutputParser()\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "d47c37dd-5c11-416c-a3b6-bec413cd70e8", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'Victor is a skinny 12-year-old with sloppy hair who is seen sleeping on his fire escape in the sun. He is hesitant to go to the pool with his friend Carlos because he is afraid of getting in trouble for not letting his mother cut his hair. Ultimately, he decides to go to the pool with Carlos.'" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "chain.invoke(\"victor\")" + ] + }, + { + "cell_type": "markdown", + "id": "3a5bb5ca-c3ae-4a58-be67-2cd18574b9a3", + "metadata": {}, + "source": [ + "## API reference\n", + "\n", + "For detailed documentation of all BoxRetriever features and configurations head to the [API reference](https://api.python.langchain.com/en/latest/retrievers/langchain_box.retrievers.Box.BoxRetriever.html).\n", + "\n", + "\n", + "## Help\n", + "\n", + "If you have questions, you can check out our [developer documentation](https://developer.box.com) or reach out to use in our [developer community](https://community.box.com)." 
+ ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.6" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/libs/partners/box/langchain_box/__init__.py b/libs/partners/box/langchain_box/__init__.py index 9959317438b08..61bca92a6fb98 100644 --- a/libs/partners/box/langchain_box/__init__.py +++ b/libs/partners/box/langchain_box/__init__.py @@ -1,7 +1,8 @@ from importlib import metadata from langchain_box.document_loaders import BoxLoader -from langchain_box.utilities import BoxAPIWrapper, BoxAuth, BoxAuthType +from langchain_box.retrievers import BoxRetriever +from langchain_box.utilities import BoxAuth, BoxAuthType, _BoxAPIWrapper try: __version__ = metadata.version(__package__) @@ -12,8 +13,9 @@ __all__ = [ "BoxLoader", + "BoxRetriever", "BoxAuth", "BoxAuthType", - "BoxAPIWrapper", + "_BoxAPIWrapper", "__version__", ] diff --git a/libs/partners/box/langchain_box/document_loaders/box.py b/libs/partners/box/langchain_box/document_loaders/box.py index 05b2daf717766..d5c3f6977542a 100644 --- a/libs/partners/box/langchain_box/document_loaders/box.py +++ b/libs/partners/box/langchain_box/document_loaders/box.py @@ -3,14 +3,14 @@ from box_sdk_gen import FileBaseTypeField # type: ignore from langchain_core.document_loaders.base import BaseLoader from langchain_core.documents import Document -from langchain_core.pydantic_v1 import BaseModel, ConfigDict, root_validator +from langchain_core.pydantic_v1 import BaseModel, root_validator +from langchain_core.utils import get_from_dict_or_env -from langchain_box.utilities import BoxAPIWrapper, BoxAuth +from langchain_box.utilities import BoxAuth, _BoxAPIWrapper class BoxLoader(BaseLoader, BaseModel): - """ - BoxLoader + """BoxLoader. This class will help you load files from your Box instance. You must have a Box account. If you need one, you can sign up for a free developer account. @@ -33,18 +33,18 @@ class BoxLoader(BaseLoader, BaseModel): pip install -U langchain-box export BOX_DEVELOPER_TOKEN="your-api-key" + This loader returns ``Document `` objects built from text representations of files in Box. It will skip any document without a text representation available. You can provide either a ``List[str]`` containing Box file IDS, or you can provide a ``str`` contining a Box folder ID. If providing a folder ID, you can also enable recursive mode to get the full tree under that folder. - :::info + .. note:: A Box instance can contain Petabytes of files, and folders can contain millions of files. Be intentional when choosing what folders you choose to index. And we recommend never getting all files from folder 0 recursively. Folder ID 0 is your root folder. 
- ::: Instantiate: @@ -121,32 +121,36 @@ class BoxLoader(BaseLoader, BaseModel): Terrarium: $120\nTotal: $920') """ - model_config = ConfigDict(use_enum_values=True) - - """String containing the Box Developer Token generated in the developer console""" box_developer_token: Optional[str] = None - """Configured langchain_box.utilities.BoxAuth object""" + """String containing the Box Developer Token generated in the developer console""" + box_auth: Optional[BoxAuth] = None - """List[str] containing Box file ids""" + """Configured langchain_box.utilities.BoxAuth object""" + box_file_ids: Optional[List[str]] = None - """String containing box folder id to load files from""" + """List[str] containing Box file ids""" + box_folder_id: Optional[str] = None + """String containing box folder id to load files from""" + + recursive: Optional[bool] = False """If getting files by folder id, recursive is a bool to determine if you wish to traverse subfolders to return child documents. Default is False""" - recursive: Optional[bool] = False + + character_limit: Optional[int] = -1 """character_limit is an int that caps the number of characters to return per document.""" - character_limit: Optional[int] = -1 - box: Optional[BoxAPIWrapper] + _box: Optional[_BoxAPIWrapper] class Config: arbitrary_types_allowed = True extra = "allow" + use_enum_values = True @root_validator(allow_reuse=True) def validate_box_loader_inputs(cls, values: Dict[str, Any]) -> Dict[str, Any]: - box = None + _box = None """Validate that has either box_file_ids or box_folder_id.""" if not values.get("box_file_ids") and not values.get("box_folder_id"): @@ -159,19 +163,30 @@ def validate_box_loader_inputs(cls, values: Dict[str, Any]) -> Dict[str, Any]: ) """Validate that we have either a box_developer_token or box_auth.""" - if not values.get("box_auth") and not values.get("box_developer_token"): - raise ValueError( - "you must provide box_developer_token or a box_auth " - "generated with langchain_box.utilities.BoxAuth" + if not values.get("box_auth"): + if not get_from_dict_or_env( + values, "box_developer_token", "BOX_DEVELOPER_TOKEN" + ): + raise ValueError( + "you must provide box_developer_token or a box_auth " + "generated with langchain_box.utilities.BoxAuth" + ) + else: + token = get_from_dict_or_env( + values, "box_developer_token", "BOX_DEVELOPER_TOKEN" + ) + + _box = _BoxAPIWrapper( # type: ignore[call-arg] + box_developer_token=token, + character_limit=values.get("character_limit"), + ) + else: + _box = _BoxAPIWrapper( # type: ignore[call-arg] + box_auth=values.get("box_auth"), + character_limit=values.get("character_limit"), ) - box = BoxAPIWrapper( # type: ignore[call-arg] - box_developer_token=values.get("box_developer_token"), - box_auth=values.get("box_auth"), - character_limit=values.get("character_limit"), - ) - - values["box"] = box + values["_box"] = _box return values @@ -181,7 +196,7 @@ def _get_files_from_folder(self, folder_id): # type: ignore[no-untyped-def] for file in folder_content: try: if file.type == FileBaseTypeField.FILE: - doc = self.box.get_document_by_file_id(file.id) + doc = self._box.get_document_by_file_id(file.id) if doc is not None: yield doc @@ -199,7 +214,7 @@ def lazy_load(self) -> Iterator[Document]: if self.box_file_ids: for file_id in self.box_file_ids: try: - file = self.box.get_document_by_file_id(file_id) # type: ignore[union-attr] + file = self._box.get_document_by_file_id(file_id) # type: ignore[union-attr] if file is not None: yield file diff --git 
a/libs/partners/box/langchain_box/retrievers/__init__.py b/libs/partners/box/langchain_box/retrievers/__init__.py new file mode 100644 index 0000000000000..8a8ac5dc4c213 --- /dev/null +++ b/libs/partners/box/langchain_box/retrievers/__init__.py @@ -0,0 +1,5 @@ +"""Box Document Loaders.""" + +from langchain_box.retrievers.box import BoxRetriever + +__all__ = ["BoxRetriever"] diff --git a/libs/partners/box/langchain_box/retrievers/box.py b/libs/partners/box/langchain_box/retrievers/box.py new file mode 100644 index 0000000000000..5db19c693c4e1 --- /dev/null +++ b/libs/partners/box/langchain_box/retrievers/box.py @@ -0,0 +1,158 @@ +from typing import Any, Dict, List, Optional + +from langchain_core.callbacks import CallbackManagerForRetrieverRun +from langchain_core.documents import Document +from langchain_core.pydantic_v1 import root_validator +from langchain_core.retrievers import BaseRetriever + +from langchain_box.utilities import BoxAuth, _BoxAPIWrapper + + +class BoxRetriever(BaseRetriever): + """Box retriever. + + `BoxRetriever` provides the ability to retrieve content from + your Box instance in a couple of ways. + + 1. You can use the Box full-text search to retrieve the + complete document(s) that match your search query, as + `List[Document]` + 2. You can use the Box AI Platform API to retrieve the results + from a Box AI prompt. This can be a `Document` containing + the result of the prompt, or you can retrieve the citations + used to generate the prompt to include in your vectorstore. + + Setup: + Install ``langchain-box``: + + .. code-block:: bash + + pip install -U langchain-box + + Instantiate: + + To use search: + .. code-block:: python + + from langchain_box.retrievers import BoxRetriever + + retriever = BoxRetriever() + + To use Box AI: + .. code-block:: python + + from langchain_box.retrievers import BoxRetriever + + file_ids=["12345","67890"] + + retriever = BoxRetriever(file_ids) + + + Usage: + .. code-block:: python + + retriever = BoxRetriever() + retriever.invoke("victor") + print(docs[0].page_content[:100]) + + .. code-block:: none + + [ + Document( + metadata={ + 'source': 'url', + 'title': 'FIVE_FEET_AND_RISING_by_Peter_Sollett_pdf' + }, + page_content='\\n3/20/23, 5:31 PM F...' + ) + ] + + Use within a chain: + .. code-block:: python + + from langchain_core.output_parsers import StrOutputParser + from langchain_core.prompts import ChatPromptTemplate + from langchain_core.runnables import RunnablePassthrough + from langchain_openai import ChatOpenAI + + retriever = BoxRetriever(box_developer_token=box_developer_token, character_limit=10000) + + context="You are an actor reading scripts to learn about your role in an upcoming movie." + question="describe the character Victor" + + prompt = ChatPromptTemplate.from_template( + \"""Answer the question based only on the context provided. + + Context: {context} + + Question: {question}\""" + ) + + def format_docs(docs): + return "\\n\\n".join(doc.page_content for doc in docs) + + chain = ( + {"context": retriever | format_docs, "question": RunnablePassthrough()} + | prompt + | llm + | StrOutputParser() + ) + + chain.invoke("Victor") # search query to find files in Box + ) + + .. code-block:: none + + 'Victor is a skinny 12-year-old with sloppy hair who is seen + sleeping on his fire escape in the sun. He is hesitant to go to + the pool with his friend Carlos because he is afraid of getting + in trouble for not letting his mother cut his hair. Ultimately, + he decides to go to the pool with Carlos.' 
+ """ # noqa: E501 + + box_developer_token: Optional[str] = None + """String containing the Box Developer Token generated in the developer console""" + + box_auth: Optional[BoxAuth] = None + """Configured langchain_box.utilities.BoxAuth object""" + + box_file_ids: Optional[List[str]] = None + """List[str] containing Box file ids""" + character_limit: Optional[int] = -1 + """character_limit is an int that caps the number of characters to + return per document.""" + + _box: Optional[_BoxAPIWrapper] + + class Config: + arbitrary_types_allowed = True + extra = "allow" + + @root_validator(allow_reuse=True) + def validate_box_loader_inputs(cls, values: Dict[str, Any]) -> Dict[str, Any]: + _box = None + + """Validate that we have either a box_developer_token or box_auth.""" + if not values.get("box_auth") and not values.get("box_developer_token"): + raise ValueError( + "you must provide box_developer_token or a box_auth " + "generated with langchain_box.utilities.BoxAuth" + ) + + _box = _BoxAPIWrapper( # type: ignore[call-arg] + box_developer_token=values.get("box_developer_token"), + box_auth=values.get("box_auth"), + character_limit=values.get("character_limit"), + ) + + values["_box"] = _box + + return values + + def _get_relevant_documents( + self, query: str, *, run_manager: CallbackManagerForRetrieverRun + ) -> List[Document]: + if self.box_file_ids: # If using Box AI + return self._box.ask_box_ai(query=query, box_file_ids=self.box_file_ids) # type: ignore[union-attr] + else: # If using Search + return self._box.search_box(query=query) # type: ignore[union-attr] diff --git a/libs/partners/box/langchain_box/utilities/__init__.py b/libs/partners/box/langchain_box/utilities/__init__.py index 91bb3148d2b69..80aca95f4789b 100644 --- a/libs/partners/box/langchain_box/utilities/__init__.py +++ b/libs/partners/box/langchain_box/utilities/__init__.py @@ -1,5 +1,5 @@ """Box API Utilities.""" -from langchain_box.utilities.box import BoxAPIWrapper, BoxAuth, BoxAuthType +from langchain_box.utilities.box import BoxAuth, BoxAuthType, _BoxAPIWrapper -__all__ = ["BoxAuth", "BoxAuthType", "BoxAPIWrapper"] +__all__ = ["BoxAuth", "BoxAuthType", "_BoxAPIWrapper"] diff --git a/libs/partners/box/langchain_box/utilities/box.py b/libs/partners/box/langchain_box/utilities/box.py index 06f4080d15c88..65c0cd92fb3d3 100644 --- a/libs/partners/box/langchain_box/utilities/box.py +++ b/libs/partners/box/langchain_box/utilities/box.py @@ -1,7 +1,7 @@ """Util that calls Box APIs.""" from enum import Enum -from typing import Any, Dict, Optional +from typing import Any, Dict, List, Optional import box_sdk_gen # type: ignore import requests @@ -11,6 +11,13 @@ class DocumentFiles(Enum): + """DocumentFiles(Enum). + + An enum containing all of the supported extensions for files + Box considers Documents. These files should have text + representations. + """ + DOC = "doc" DOCX = "docx" GDOC = "gdoc" @@ -89,6 +96,12 @@ class DocumentFiles(Enum): class ImageFiles(Enum): + """ImageFiles(Enum). + + An enum containing all of the supported extensions for files + Box considers images. + """ + ARW = "arw" BMP = "bmp" CR2 = "cr2" @@ -115,8 +128,9 @@ class ImageFiles(Enum): SVS = "svs" -""" - BoxAuthType +class BoxAuthType(Enum): + """BoxAuthType(Enum). + an enum to tell BoxLoader how you wish to autheticate your Box connection. Options are: @@ -128,22 +142,23 @@ class ImageFiles(Enum): and `box_enterprise_id` or optionally `box_user_id`. JWT - Use JWT for authentication. Config should be stored on the file system accessible to your app. 
- provide `box_jwt_path`. Optionally, provide `box_user_id` to + provide `box_jwt_path`. Optionally, provide `box_user_id` to act as a specific user -""" + """ - -class BoxAuthType(Enum): + TOKEN = "token" """Use a developer token or a token retrieved from box-sdk-gen""" - TOKEN = "token" - """Use `client_credentials` type grant""" CCG = "ccg" - """Use JWT bearer token auth""" + """Use `client_credentials` type grant""" + JWT = "jwt" + """Use JWT bearer token auth""" -""" +class BoxAuth(BaseModel): + """BoxAuth. + `BoxAuth` supports the following authentication methods: * Token — either a developer token or any token generated through the Box SDK @@ -152,16 +167,15 @@ class BoxAuthType(Enum): * CCG with a service account * CCG with a specified user - :::note - If using JWT authentication, you will need to download the configuration from the - Box developer console after generating your public/private key pair. Place this - file in your application directory structure somewhere. You will use the path to + .. note:: + If using JWT authentication, you will need to download the configuration from the + Box developer console after generating your public/private key pair. Place this + file in your application directory structure somewhere. You will use the path to this file when using the `BoxAuth` helper class. - ::: - For more information, learn about how to + For more information, learn about how to [set up a Box application](https://developer.box.com/guides/getting-started/first-application/), - and check out the + and check out the [Box authentication guide](https://developer.box.com/guides/authentication/select/) for more about our different authentication options. @@ -169,7 +183,7 @@ class BoxAuthType(Enum): To instantiate, you must provide a ``langchain_box.utilities.BoxAuthType``. - BoxAuthType is an enum to tell BoxLoader how you wish to autheticate your + BoxAuthType is an enum to tell BoxLoader how you wish to autheticate your Box connection. Options are: @@ -181,7 +195,7 @@ class BoxAuthType(Enum): and `box_enterprise_id` or optionally `box_user_id`. JWT - Use JWT for authentication. Config should be stored on the file system accessible to your app. - provide `box_jwt_path`. Optionally, provide `box_user_id` to + provide `box_jwt_path`. Optionally, provide `box_user_id` to act as a specific user .. code-block:: python @@ -198,36 +212,40 @@ class BoxAuthType(Enum): ... ) - To see examples for each supported authentication methodology, visit the - [Box providers](/docs/integrations/providers/box) page. If you want to - use OAuth 2.0 `authorization_code` flow, use - [box-sdk-gen](https://github.com/box/box-python-sdk-gen) SDK, get your + To see examples for each supported authentication methodology, visit the + [Box providers](/docs/integrations/providers/box) page. If you want to + use OAuth 2.0 `authorization_code` flow, use + [box-sdk-gen](https://github.com/box/box-python-sdk-gen) SDK, get your token, and use `BoxAuthType.TOKEN` type. -""" - - -class BoxAuth(BaseModel): - """Authentication type to use. Must pass BoxAuthType enum""" + """ auth_type: BoxAuthType - """ If using BoxAuthType.TOKEN, provide your token here""" + """langchain_box.utilities.BoxAuthType. 
Enum describing how to + authenticate against Box""" + box_developer_token: Optional[str] = None + """ If using BoxAuthType.TOKEN, provide your token here""" + + box_jwt_path: Optional[str] = None """If using BoxAuthType.JWT, provide local path to your JWT configuration file""" - box_jwt_path: Optional[str] = None - """If using BoxAuthType.CCG, provide your app's client ID""" + box_client_id: Optional[str] = None - """If using BoxAuthType.CCG, provide your app's client secret""" + """If using BoxAuthType.CCG, provide your app's client ID""" + box_client_secret: Optional[str] = None + """If using BoxAuthType.CCG, provide your app's client secret""" + + box_enterprise_id: Optional[str] = None """If using BoxAuthType.CCG, provide your enterprise ID. Only required if you are not sending `box_user_id`""" - box_enterprise_id: Optional[str] = None + + box_user_id: Optional[str] = None """If using BoxAuthType.CCG or BoxAuthType.JWT, providing `box_user_id` will act on behalf of a specific user""" - box_user_id: Optional[str] = None - box_client: Optional[box_sdk_gen.BoxClient] = None - custom_header: Dict = dict({"x-box-ai-library": "langchain"}) + _box_client: Optional[box_sdk_gen.BoxClient] = None + _custom_header: Dict = dict({"x-box-ai-library": "langchain"}) class Config: arbitrary_types_allowed = True @@ -276,16 +294,16 @@ def validate_box_auth_inputs(cls, values: Dict[str, Any]) -> Dict[str, Any]: return values - def authorize(self) -> None: + def _authorize(self) -> None: match self.auth_type: case "token": try: auth = box_sdk_gen.BoxDeveloperTokenAuth( token=self.box_developer_token ) - self.box_client = box_sdk_gen.BoxClient( + self._box_client = box_sdk_gen.BoxClient( auth=auth - ).with_extra_headers(extra_headers=self.custom_header) + ).with_extra_headers(extra_headers=self._custom_header) except box_sdk_gen.BoxSDKError as bse: raise RuntimeError( @@ -304,15 +322,15 @@ def authorize(self) -> None: ) auth = box_sdk_gen.BoxJWTAuth(config=jwt_config) - self.box_client = box_sdk_gen.BoxClient( + self._box_client = box_sdk_gen.BoxClient( auth=auth - ).with_extra_headers(extra_headers=self.custom_header) + ).with_extra_headers(extra_headers=self._custom_header) if self.box_user_id is not None: user_auth = auth.with_user_subject(self.box_user_id) - self.box_client = box_sdk_gen.BoxClient( + self._box_client = box_sdk_gen.BoxClient( auth=user_auth - ).with_extra_headers(extra_headers=self.custom_header) + ).with_extra_headers(extra_headers=self._custom_header) except box_sdk_gen.BoxSDKError as bse: raise RuntimeError( @@ -340,9 +358,9 @@ def authorize(self) -> None: ) auth = box_sdk_gen.BoxCCGAuth(config=ccg_config) - self.box_client = box_sdk_gen.BoxClient( + self._box_client = box_sdk_gen.BoxClient( auth=auth - ).with_extra_headers(extra_headers=self.custom_header) + ).with_extra_headers(extra_headers=self._custom_header) except box_sdk_gen.BoxSDKError as bse: raise RuntimeError( @@ -363,25 +381,26 @@ def authorize(self) -> None: def get_client(self) -> box_sdk_gen.BoxClient: """Instantiate the Box SDK.""" - if self.box_client is None: - self.authorize() + if self._box_client is None: + self._authorize() - return self.box_client + return self._box_client -class BoxAPIWrapper(BaseModel): +class _BoxAPIWrapper(BaseModel): """Wrapper for Box API.""" - """String containing the Box Developer Token generated in the developer console""" box_developer_token: Optional[str] = None - """Configured langchain_box.utilities.BoxAuth object""" + """String containing the Box Developer Token generated in the 
developer console""" + box_auth: Optional[BoxAuth] = None + """Configured langchain_box.utilities.BoxAuth object""" + + character_limit: Optional[int] = -1 """character_limit is an int that caps the number of characters to return per document.""" - character_limit: Optional[int] = -1 - box: Optional[box_sdk_gen.BoxClient] - file_count: int = 0 + _box: Optional[box_sdk_gen.BoxClient] class Config: arbitrary_types_allowed = True @@ -390,7 +409,7 @@ class Config: @root_validator(allow_reuse=True) def validate_box_api_inputs(cls, values: Dict[str, Any]) -> Dict[str, Any]: - values["box"] = None + values["_box"] = None """Validate that TOKEN auth type provides box_developer_token.""" if not values.get("box_auth"): @@ -402,7 +421,7 @@ def validate_box_api_inputs(cls, values: Dict[str, Any]) -> Dict[str, Any]: ) else: box_auth = values.get("box_auth") - values["box"] = box_auth.get_client() # type: ignore[union-attr] + values["_box"] = box_auth.get_client() # type: ignore[union-attr] return values @@ -411,11 +430,11 @@ def get_box_client(self) -> box_sdk_gen.BoxClient: auth_type=BoxAuthType.TOKEN, box_developer_token=self.box_developer_token ) - self.box = box_auth.get_client() + self._box = box_auth.get_client() def _do_request(self, url: str) -> Any: try: - access_token = self.box.auth.retrieve_token().access_token # type: ignore[union-attr] + access_token = self._box.auth.retrieve_token().access_token # type: ignore[union-attr] except box_sdk_gen.BoxSDKError as bse: raise RuntimeError(f"Error getting client from jwt token: {bse.message}") @@ -423,38 +442,17 @@ def _do_request(self, url: str) -> Any: resp.raise_for_status() return resp.content - def get_folder_items(self, folder_id: str) -> box_sdk_gen.Items: - """Get all the items in a folder. Accepts folder_id as str. - returns box_sdk_gen.Items""" - if self.box is None: - self.get_box_client() - - try: - folder_contents = self.box.folders.get_folder_items( # type: ignore[union-attr] - folder_id, fields=["id", "type", "name"] - ) - except box_sdk_gen.BoxAPIError as bae: - raise RuntimeError( - f"BoxAPIError: Error getting folder content: {bae.message}" - ) - except box_sdk_gen.BoxSDKError as bse: - raise RuntimeError( - f"BoxSDKError: Error getting folder content: {bse.message}" - ) - - return folder_contents.entries - - def get_text_representation(self, file_id: str = "") -> tuple[str, str, str]: + def _get_text_representation(self, file_id: str = "") -> tuple[str, str, str]: try: from box_sdk_gen import BoxAPIError, BoxSDKError except ImportError: raise ImportError("You must run `pip install box-sdk-gen`") - if self.box is None: + if self._box is None: self.get_box_client() try: - file = self.box.files.get_file_by_id( # type: ignore[union-attr] + file = self._box.files.get_file_by_id( # type: ignore[union-attr] file_id, x_rep_hints="[extracted_text]", fields=["name", "representations", "type"], @@ -486,8 +484,10 @@ def get_text_representation(self, file_id: str = "") -> tuple[str, str, str]: except requests.exceptions.HTTPError: return None, None, None # type: ignore[return-value] - if self.character_limit > 0: # type: ignore[operator] - content = raw_content[0 : self.character_limit] + if ( + self.character_limit is not None and self.character_limit > 0 # type: ignore[operator] + ): + content = raw_content[0 : (self.character_limit - 1)] else: content = raw_content @@ -499,16 +499,16 @@ def get_document_by_file_id(self, file_id: str) -> Optional[Document]: """Load a file from a Box id. Accepts file_id as str. 
Returns `Document`""" - if self.box is None: + if self._box is None: self.get_box_client() - file = self.box.files.get_file_by_id( # type: ignore[union-attr] + file = self._box.files.get_file_by_id( # type: ignore[union-attr] file_id, fields=["name", "type", "extension"] ) if file.type == "file": if hasattr(DocumentFiles, file.extension.upper()): - file_name, content, url = self.get_text_representation(file_id=file_id) + file_name, content, url = self._get_text_representation(file_id=file_id) if file_name is None or content is None or url is None: return None @@ -523,3 +523,95 @@ def get_document_by_file_id(self, file_id: str) -> Optional[Document]: return None return None + + def get_folder_items(self, folder_id: str) -> box_sdk_gen.Items: + """Get all the items in a folder. Accepts folder_id as str. + returns box_sdk_gen.Items""" + if self._box is None: + self.get_box_client() + + try: + folder_contents = self._box.folders.get_folder_items( # type: ignore[union-attr] + folder_id, fields=["id", "type", "name"] + ) + except box_sdk_gen.BoxAPIError as bae: + raise RuntimeError( + f"BoxAPIError: Error getting folder content: {bae.message}" + ) + except box_sdk_gen.BoxSDKError as bse: + raise RuntimeError( + f"BoxSDKError: Error getting folder content: {bse.message}" + ) + + return folder_contents.entries + + def search_box(self, query: str) -> List[Document]: + if self._box is None: + self.get_box_client() + + files = [] + + try: + results = self._box.search.search_for_content( # type: ignore[union-attr] + query=query, fields=["id", "type", "extension"] + ) + + if results.entries is None or len(results.entries) <= 0: + return None # type: ignore[return-value] + + for file in results.entries: + if ( + file is not None + and file.type == "file" + and hasattr(DocumentFiles, file.extension.upper()) + ): + doc = self.get_document_by_file_id(file.id) + + if doc is not None: + files.append(doc) + + return files + except box_sdk_gen.BoxAPIError as bae: + raise RuntimeError( + f"BoxAPIError: Error getting search results: {bae.message}" + ) + except box_sdk_gen.BoxSDKError as bse: + raise RuntimeError( + f"BoxSDKError: Error getting search results: {bse.message}" + ) + + def ask_box_ai(self, query: str, box_file_ids: List[str]) -> List[Document]: + if self._box is None: + self.get_box_client() + + ai_mode = box_sdk_gen.CreateAiAskMode.SINGLE_ITEM_QA.value + + if len(box_file_ids) > 1: + ai_mode = box_sdk_gen.CreateAiAskMode.MULTIPLE_ITEM_QA.value + elif len(box_file_ids) <= 0: + raise ValueError("BOX_AI_ASK requires at least one file ID") + + items = [] + + for file_id in box_file_ids: + item = box_sdk_gen.CreateAiAskItems( + id=file_id, type=box_sdk_gen.CreateAiAskItemsTypeField.FILE.value + ) + items.append(item) + + try: + response = self._box.ai.create_ai_ask(ai_mode, query, items) # type: ignore[union-attr] + except box_sdk_gen.BoxAPIError as bae: + raise RuntimeError( + f"BoxAPIError: Error getting Box AI result: {bae.message}" + ) + except box_sdk_gen.BoxSDKError as bse: + raise RuntimeError( + f"BoxSDKError: Error getting Box AI result: {bse.message}" + ) + + content = response.answer + + metadata = {"source": "Box AI", "title": f"Box AI {query}"} + + return [Document(page_content=content, metadata=metadata)] diff --git a/libs/partners/box/tests/integration_tests/document_loaders/test_box_file_loader.py b/libs/partners/box/tests/integration_tests/document_loaders/test_box_file_loader.py index 0146b60dcf615..4adbe7757400a 100644 --- 
a/libs/partners/box/tests/integration_tests/document_loaders/test_box_file_loader.py +++ b/libs/partners/box/tests/integration_tests/document_loaders/test_box_file_loader.py @@ -1,42 +1,3 @@ -from langchain_core.documents import Document -from pytest_mock import MockerFixture - -from langchain_box.document_loaders import BoxLoader - - -# test Document retrieval -def test_file_load(mocker: MockerFixture) -> None: - mocker.patch( - "langchain_box.utilities.BoxAPIWrapper.get_document_by_file_id", return_value=[] - ) - - loader = BoxLoader( # type: ignore[call-arg] - box_developer_token="box_developer_token", - box_file_ids=["box_file_ids"], - ) - - documents = loader.load() - assert documents - - mocker.patch( - "langchain_box.utilities.BoxAPIWrapper.get_document_by_file_id", - return_value=( - Document( - page_content="Test file mode\ndocument contents", - metadata={"title": "Testing Files"}, - ) - ), - ) - - loader = BoxLoader( # type: ignore[call-arg] - box_developer_token="box_developer_token", - box_file_ids=["box_file_ids"], - ) - - documents = loader.load() - assert documents == [ - Document( - page_content="Test file mode\ndocument contents", - metadata={"title": "Testing Files"}, - ) - ] +""" +TODO: build live integration tests +""" diff --git a/libs/partners/box/tests/integration_tests/retrievers/__init__.py b/libs/partners/box/tests/integration_tests/retrievers/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/libs/partners/box/tests/integration_tests/retrievers/test_box_retriever.py b/libs/partners/box/tests/integration_tests/retrievers/test_box_retriever.py new file mode 100644 index 0000000000000..4adbe7757400a --- /dev/null +++ b/libs/partners/box/tests/integration_tests/retrievers/test_box_retriever.py @@ -0,0 +1,3 @@ +""" +TODO: build live integration tests +""" diff --git a/libs/partners/box/tests/integration_tests/utilities/test_box_util.py b/libs/partners/box/tests/integration_tests/utilities/test_box_util.py index 6403b2f54105e..4adbe7757400a 100644 --- a/libs/partners/box/tests/integration_tests/utilities/test_box_util.py +++ b/libs/partners/box/tests/integration_tests/utilities/test_box_util.py @@ -1,47 +1,3 @@ -from unittest.mock import Mock - -import pytest -from langchain_core.documents import Document -from pytest_mock import MockerFixture - -from langchain_box.utilities import BoxAPIWrapper - - -@pytest.fixture() -def mock_worker(mocker: MockerFixture) -> None: - mocker.patch("langchain_box.utilities.BoxAuth.authorize", return_value=Mock()) - mocker.patch("langchain_box.utilities.BoxAuth.get_client", return_value=Mock()) - mocker.patch( - "langchain_box.utilities.BoxAPIWrapper.get_text_representation", - return_value=("filename", "content", "url"), - ) - - -def test_get_documents_by_file_ids(mock_worker, mocker: MockerFixture) -> None: # type: ignore[no-untyped-def] - mocker.patch( - "langchain_box.utilities.BoxAPIWrapper.get_document_by_file_id", - return_value=( - Document( - page_content="content", metadata={"source": "url", "title": "filename"} - ) - ), - ) - - box = BoxAPIWrapper(box_developer_token="box_developer_token") # type: ignore[call-arg] - - documents = box.get_document_by_file_id("box_file_id") - assert documents == Document( - page_content="content", metadata={"source": "url", "title": "filename"} - ) - - -def test_get_documents_by_folder_id(mock_worker, mocker: MockerFixture) -> None: # type: ignore[no-untyped-def] - mocker.patch( - "langchain_box.utilities.BoxAPIWrapper.get_folder_items", - return_value=([{"id": 
"file_id", "type": "file"}]), - ) - - box = BoxAPIWrapper(box_developer_token="box_developer_token") # type: ignore[call-arg] - - folder_contents = box.get_folder_items("box_folder_id") - assert folder_contents == [{"id": "file_id", "type": "file"}] +""" +TODO: build live integration tests +""" diff --git a/libs/partners/box/tests/unit_tests/document_loaders/test_box_loader.py b/libs/partners/box/tests/unit_tests/document_loaders/test_box_loader.py index 96e46f15e8be6..101913bcea00d 100644 --- a/libs/partners/box/tests/unit_tests/document_loaders/test_box_loader.py +++ b/libs/partners/box/tests/unit_tests/document_loaders/test_box_loader.py @@ -1,4 +1,6 @@ import pytest +from langchain_core.documents import Document +from pytest_mock import MockerFixture from langchain_box.document_loaders import BoxLoader from langchain_box.utilities import BoxAuth, BoxAuthType @@ -56,3 +58,42 @@ def test_failed_initialization_files_and_folders() -> None: box_folder_id="box_folder_id", box_file_ids=["box_file_ids"], ) + + +# test Document retrieval +def test_file_load(mocker: MockerFixture) -> None: + mocker.patch( + "langchain_box.utilities._BoxAPIWrapper.get_document_by_file_id", + return_value=[], + ) + + loader = BoxLoader( # type: ignore[call-arg] + box_developer_token="box_developer_token", + box_file_ids=["box_file_ids"], + ) + + documents = loader.load() + assert documents + + mocker.patch( + "langchain_box.utilities._BoxAPIWrapper.get_document_by_file_id", + return_value=( + Document( + page_content="Test file mode\ndocument contents", + metadata={"title": "Testing Files"}, + ) + ), + ) + + loader = BoxLoader( # type: ignore[call-arg] + box_developer_token="box_developer_token", + box_file_ids=["box_file_ids"], + ) + + documents = loader.load() + assert documents == [ + Document( + page_content="Test file mode\ndocument contents", + metadata={"title": "Testing Files"}, + ) + ] diff --git a/libs/partners/box/tests/unit_tests/retrievers/__init__.py b/libs/partners/box/tests/unit_tests/retrievers/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/libs/partners/box/tests/unit_tests/retrievers/test_box_retriever.py b/libs/partners/box/tests/unit_tests/retrievers/test_box_retriever.py new file mode 100644 index 0000000000000..1dc8746b771a6 --- /dev/null +++ b/libs/partners/box/tests/unit_tests/retrievers/test_box_retriever.py @@ -0,0 +1,89 @@ +import pytest +from langchain_core.documents import Document +from pytest_mock import MockerFixture + +from langchain_box.retrievers import BoxRetriever +from langchain_box.utilities import BoxAuth, BoxAuthType + + +# Test auth types +def test_direct_token_initialization() -> None: + retriever = BoxRetriever( # type: ignore[call-arg] + box_developer_token="box_developer_token", + box_file_ids=["box_file_ids"], + ) + + assert retriever.box_developer_token == "box_developer_token" + assert retriever.box_file_ids == ["box_file_ids"] + + +def test_failed_direct_token_initialization() -> None: + with pytest.raises(ValueError): + retriever = BoxRetriever(box_file_ids=["box_file_ids"]) # type: ignore[call-arg] # noqa: F841 + + +def test_auth_initialization() -> None: + auth = BoxAuth( + auth_type=BoxAuthType.TOKEN, box_developer_token="box_developer_token" + ) + + retriever = BoxRetriever( # type: ignore[call-arg] + box_auth=auth, + box_file_ids=["box_file_ids"], + ) + + assert retriever.box_file_ids == ["box_file_ids"] + + +# test search retrieval +def test_search(mocker: MockerFixture) -> None: + mocker.patch( + 
"langchain_box.utilities._BoxAPIWrapper.search_box", + return_value=( + [ + Document( + page_content="Test file mode\ndocument contents", + metadata={"title": "Testing Files"}, + ) + ] + ), + ) + + retriever = BoxRetriever( # type: ignore[call-arg] + box_developer_token="box_developer_token" + ) + + documents = retriever.invoke("query") + assert documents == [ + Document( + page_content="Test file mode\ndocument contents", + metadata={"title": "Testing Files"}, + ) + ] + + +# test ai retrieval +def test_ai(mocker: MockerFixture) -> None: + mocker.patch( + "langchain_box.utilities._BoxAPIWrapper.ask_box_ai", + return_value=( + [ + Document( + page_content="Test file mode\ndocument contents", + metadata={"title": "Testing Files"}, + ) + ] + ), + ) + + retriever = BoxRetriever( # type: ignore[call-arg] + box_developer_token="box_developer_token", box_file_ids=["box_file_ids"] + ) + + documents = retriever.invoke("query") + assert documents == [ + Document( + page_content="Test file mode\ndocument contents", + metadata={"title": "Testing Files"}, + ) + ] diff --git a/libs/partners/box/tests/unit_tests/test_imports.py b/libs/partners/box/tests/unit_tests/test_imports.py index 83507ee1545c8..c25f1fdf242a5 100644 --- a/libs/partners/box/tests/unit_tests/test_imports.py +++ b/libs/partners/box/tests/unit_tests/test_imports.py @@ -2,9 +2,10 @@ EXPECTED_ALL = [ "BoxLoader", + "BoxRetriever", "BoxAuth", "BoxAuthType", - "BoxAPIWrapper", + "_BoxAPIWrapper", "__version__", ] diff --git a/libs/partners/box/tests/unit_tests/utilities/test_box_util.py b/libs/partners/box/tests/unit_tests/utilities/test_box_util.py index ec011f0032087..1eabbdf759236 100644 --- a/libs/partners/box/tests/unit_tests/utilities/test_box_util.py +++ b/libs/partners/box/tests/unit_tests/utilities/test_box_util.py @@ -1,7 +1,21 @@ +from unittest.mock import Mock + import pytest +from langchain_core.documents import Document from pydantic.v1.error_wrappers import ValidationError +from pytest_mock import MockerFixture + +from langchain_box.utilities import BoxAuth, BoxAuthType, _BoxAPIWrapper -from langchain_box.utilities import BoxAPIWrapper, BoxAuth, BoxAuthType + +@pytest.fixture() +def mock_worker(mocker: MockerFixture) -> None: + mocker.patch("langchain_box.utilities.BoxAuth._authorize", return_value=Mock()) + mocker.patch("langchain_box.utilities.BoxAuth.get_client", return_value=Mock()) + mocker.patch( + "langchain_box.utilities._BoxAPIWrapper._get_text_representation", + return_value=("filename", "content", "url"), + ) # Test auth types @@ -79,7 +93,7 @@ def test_failed_ccg_initialization() -> None: def test_direct_token_initialization() -> None: - box = BoxAPIWrapper( # type: ignore[call-arg] + box = _BoxAPIWrapper( # type: ignore[call-arg] box_developer_token="box_developer_token" ) @@ -91,11 +105,126 @@ def test_auth_initialization() -> None: auth_type=BoxAuthType.TOKEN, box_developer_token="box_developer_token" ) - box = BoxAPIWrapper(box_auth=auth) # type: ignore[call-arg] # noqa: F841 + box = _BoxAPIWrapper(box_auth=auth) # type: ignore[call-arg] # noqa: F841 assert auth.box_developer_token == "box_developer_token" def test_failed_initialization_no_auth() -> None: with pytest.raises(ValidationError): - box = BoxAPIWrapper() # type: ignore[call-arg] # noqa: F841 + box = _BoxAPIWrapper() # type: ignore[call-arg] # noqa: F841 + + +def test_get_documents_by_file_ids(mock_worker, mocker: MockerFixture) -> None: # type: ignore[no-untyped-def] + mocker.patch( + "langchain_box.utilities._BoxAPIWrapper.get_document_by_file_id", 
+ return_value=( + Document( + page_content="content", metadata={"source": "url", "title": "filename"} + ) + ), + ) + + box = _BoxAPIWrapper(box_developer_token="box_developer_token") # type: ignore[call-arg] + + documents = box.get_document_by_file_id("box_file_id") + assert documents == Document( + page_content="content", metadata={"source": "url", "title": "filename"} + ) + + +def test_get_documents_by_folder_id(mock_worker, mocker: MockerFixture) -> None: # type: ignore[no-untyped-def] + mocker.patch( + "langchain_box.utilities._BoxAPIWrapper.get_folder_items", + return_value=([{"id": "file_id", "type": "file"}]), + ) + + box = _BoxAPIWrapper(box_developer_token="box_developer_token") # type: ignore[call-arg] + + folder_contents = box.get_folder_items("box_folder_id") + assert folder_contents == [{"id": "file_id", "type": "file"}] + + +def test_box_search(mock_worker, mocker: MockerFixture) -> None: # type: ignore[no-untyped-def] + mocker.patch( + "langchain_box.utilities._BoxAPIWrapper.search_box", + return_value=( + [ + Document( + page_content="Test file mode\ndocument contents", + metadata={"title": "Testing Files"}, + ) + ] + ), + ) + + box = _BoxAPIWrapper(box_developer_token="box_developer_token") # type: ignore[call-arg] + + documents = box.search_box("query") + assert documents == [ + Document( + page_content="Test file mode\ndocument contents", + metadata={"title": "Testing Files"}, + ) + ] + + +def test_ask_box_ai_single_file(mock_worker, mocker: MockerFixture) -> None: # type: ignore[no-untyped-def] + mocker.patch( + "langchain_box.utilities._BoxAPIWrapper.ask_box_ai", + return_value=( + [ + Document( + page_content="Test file mode\ndocument contents", + metadata={"title": "Testing Files"}, + ) + ] + ), + ) + + box = _BoxAPIWrapper( # type: ignore[call-arg] + box_developer_token="box_developer_token", box_file_ids=["box_file_ids"] + ) + + documents = box.ask_box_ai("query") # type: ignore[call-arg] + assert documents == [ + Document( + page_content="Test file mode\ndocument contents", + metadata={"title": "Testing Files"}, + ) + ] + + +def test_ask_box_ai_multiple_files(mock_worker, mocker: MockerFixture) -> None: # type: ignore[no-untyped-def] + mocker.patch( + "langchain_box.utilities._BoxAPIWrapper.ask_box_ai", + return_value=( + [ + Document( + page_content="Test file 1 mode\ndocument contents", + metadata={"title": "Test File 1"}, + ), + Document( + page_content="Test file 2 mode\ndocument contents", + metadata={"title": "Test File 2"}, + ), + ] + ), + ) + + box = _BoxAPIWrapper( # type: ignore[call-arg] + box_developer_token="box_developer_token", + box_file_ids=["box_file_id 1", "box_file_id 2"], + ) + + documents = box.ask_box_ai("query") # type: ignore[call-arg] + assert documents == [ + Document( + page_content="Test file 1 mode\ndocument contents", + metadata={"title": "Test File 1"}, + ), + Document( + page_content="Test file 2 mode\ndocument contents", + metadata={"title": "Test File 2"}, + ), + ] From 3981d736df7b113346670f4e26cb8ee0ecaa656e Mon Sep 17 00:00:00 2001 From: Yuki Watanabe <31463517+B-Step62@users.noreply.github.com> Date: Thu, 22 Aug 2024 09:19:28 +0900 Subject: [PATCH 53/80] databricks: Add partner package directory and ChatDatabricks implementation (#25430) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ### Summary Create `langchain-databricks` as a new partner packages. 
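As a minimal sketch of the intended usage (mirroring the `ChatDatabricks` docstring added in this PR; the endpoint name is only an example and assumes a Databricks workspace reachable via `DATABRICKS_HOSTNAME` / `DATABRICKS_TOKEN`):

```python
# Minimal usage sketch. Assumes the langchain-databricks package is installed,
# the named Databricks Model Serving endpoint exists, and workspace credentials
# are available in the environment (DATABRICKS_HOSTNAME / DATABRICKS_TOKEN).
from langchain_databricks import ChatDatabricks

llm = ChatDatabricks(
    endpoint="databricks-meta-llama-3-1-405b-instruct",  # example endpoint name
    temperature=0,
    max_tokens=500,
)

messages = [
    ("system", "You are a helpful translator. Translate the user sentence to French."),
    ("human", "I love programming."),
]
llm.invoke(messages)  # -> AIMessage("J'adore la programmation.", ...)
```
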
This PR does not migrate all existing Databricks integration, but the package will eventually contain: * `ChatDatabricks` (implemented in this PR) * `DatabricksVectorSearch` * `DatabricksEmbeddings` * ~`UCFunctionToolkit`~ (will be done after UC SDK work which drastically simplify implementation) Also, this PR does not add integration tests yet. This will be added once the Databricks test workspace is ready. Tagging @efriis as POC ### Tracker [✍️] Create a package and imgrate ChatDatabricks [ ] Migrate DatabricksVectorSearch, DatabricksEmbeddings, and their docs ~[ ] Migrate UCFunctionToolkit and its doc~ [ ] Add provider document and update README.md [ ] Add integration tests and set up secrets (after moved to an external package) [ ] Add deprecation note to the community implementations. --------- Signed-off-by: B-Step62 Co-authored-by: Erick Friis --- docs/Makefile | 2 +- docs/docs/integrations/chat/databricks.ipynb | 20 +- libs/partners/databricks/.gitignore | 1 + libs/partners/databricks/LICENSE | 21 + libs/partners/databricks/Makefile | 62 + libs/partners/databricks/README.md | 24 + .../langchain_databricks/__init__.py | 15 + .../langchain_databricks/chat_models.py | 573 ++++ .../databricks/langchain_databricks/py.typed | 0 libs/partners/databricks/poetry.lock | 2495 +++++++++++++++++ libs/partners/databricks/pyproject.toml | 99 + .../databricks/scripts/check_imports.py | 17 + .../databricks/scripts/check_pydantic.sh | 27 + .../databricks/scripts/lint_imports.sh | 18 + libs/partners/databricks/tests/__init__.py | 0 .../tests/integration_tests/__init__.py | 0 .../tests/integration_tests/test_compile.py | 7 + .../databricks/tests/unit_tests/__init__.py | 0 .../tests/unit_tests/test_chat_models.py | 321 +++ .../tests/unit_tests/test_imports.py | 10 + 20 files changed, 3699 insertions(+), 13 deletions(-) create mode 100644 libs/partners/databricks/.gitignore create mode 100644 libs/partners/databricks/LICENSE create mode 100644 libs/partners/databricks/Makefile create mode 100644 libs/partners/databricks/README.md create mode 100644 libs/partners/databricks/langchain_databricks/__init__.py create mode 100644 libs/partners/databricks/langchain_databricks/chat_models.py create mode 100644 libs/partners/databricks/langchain_databricks/py.typed create mode 100644 libs/partners/databricks/poetry.lock create mode 100644 libs/partners/databricks/pyproject.toml create mode 100644 libs/partners/databricks/scripts/check_imports.py create mode 100755 libs/partners/databricks/scripts/check_pydantic.sh create mode 100755 libs/partners/databricks/scripts/lint_imports.sh create mode 100644 libs/partners/databricks/tests/__init__.py create mode 100644 libs/partners/databricks/tests/integration_tests/__init__.py create mode 100644 libs/partners/databricks/tests/integration_tests/test_compile.py create mode 100644 libs/partners/databricks/tests/unit_tests/__init__.py create mode 100644 libs/partners/databricks/tests/unit_tests/test_chat_models.py create mode 100644 libs/partners/databricks/tests/unit_tests/test_imports.py diff --git a/docs/Makefile b/docs/Makefile index f230ce1203591..8f4c653c7349a 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -18,7 +18,7 @@ for dir; do \ if find "$$dir" -maxdepth 1 -type f \( -name "pyproject.toml" -o -name "setup.py" \) | grep -q .; then \ echo "$$dir"; \ fi \ -done' sh {} + | grep -vE "airbyte|ibm|couchbase" | tr '\n' ' ') +done' sh {} + | grep -vE "airbyte|ibm|couchbase|databricks" | tr '\n' ' ') PORT ?= 3001 diff --git 
a/docs/docs/integrations/chat/databricks.ipynb b/docs/docs/integrations/chat/databricks.ipynb index f0612c90d1a43..448935d041f95 100644 --- a/docs/docs/integrations/chat/databricks.ipynb +++ b/docs/docs/integrations/chat/databricks.ipynb @@ -31,7 +31,7 @@ "\n", "| Class | Package | Local | Serializable | Package downloads | Package latest |\n", "| :--- | :--- | :---: | :---: | :---: | :---: |\n", - "| [ChatDatabricks](https://api.python.langchain.com/en/latest/chat_models/langchain_community.chat_models.databricks.ChatDatabricks.html) | [langchain-community](https://api.python.langchain.com/en/latest/community_api_reference.html) | ❌ | beta | ![PyPI - Downloads](https://img.shields.io/pypi/dm/langchain-community?style=flat-square&label=%20) | ![PyPI - Version](https://img.shields.io/pypi/v/langchain-community?style=flat-square&label=%20) |\n", + "| [ChatDatabricks](https://api.python.langchain.com/en/latest/chat_models/langchain_community.chat_models.databricks.ChatDatabricks.html) | [langchain-databricks](https://api.python.langchain.com/en/latest/databricks_api_reference.html) | ❌ | beta | ![PyPI - Downloads](https://img.shields.io/pypi/dm/langchain-databricks?style=flat-square&label=%20) | ![PyPI - Version](https://img.shields.io/pypi/v/langchain-databricks?style=flat-square&label=%20) |\n", "\n", "### Model features\n", "| [Tool calling](/docs/how_to/tool_calling/) | [Structured output](/docs/how_to/structured_output/) | JSON mode | [Image input](/docs/how_to/multimodal_inputs/) | Audio input | Video input | [Token-level streaming](/docs/how_to/chat_streaming/) | Native async | [Token usage](/docs/how_to/chat_token_usage_tracking/) | [Logprobs](/docs/how_to/logprobs/) |\n", @@ -99,7 +99,7 @@ "source": [ "### Installation\n", "\n", - "The LangChain Databricks integration lives in the `langchain-community` package. Also, `mlflow >= 2.9 ` is required to run the code in this notebook." + "The LangChain Databricks integration lives in the `langchain-databricks` package." ] }, { @@ -108,7 +108,7 @@ "metadata": {}, "outputs": [], "source": [ - "%pip install -qU langchain-community mlflow>=2.9.0" + "%pip install -qU langchain-databricks" ] }, { @@ -133,7 +133,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain_community.chat_models import ChatDatabricks\n", + "from langchain_databricks import ChatDatabricks\n", "\n", "chat_model = ChatDatabricks(\n", " endpoint=\"databricks-dbrx-instruct\",\n", @@ -245,9 +245,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Invocation (streaming)\n", - "\n", - "`ChatDatabricks` supports streaming response by `stream` method since `langchain-community>=0.2.1`." + "## Invocation (streaming)" ] }, { @@ -299,7 +297,7 @@ "* An LLM was registered and deployed to [a Databricks serving endpoint](https://docs.databricks.com/machine-learning/model-serving/index.html) via MLflow. The endpoint must have OpenAI-compatible chat input/output format ([reference](https://mlflow.org/docs/latest/llms/deployments/index.html#chat))\n", "* You have [\"Can Query\" permission](https://docs.databricks.com/security/auth-authz/access-control/serving-endpoint-acl.html) to the endpoint.\n", "\n", - "Once the endpoint is ready, the usage pattern is completely same as Foundation Models." + "Once the endpoint is ready, the usage pattern is identical to that of Foundation Models." ] }, { @@ -332,7 +330,7 @@ "\n", "First, create a new Databricks serving endpoint that proxies requests to the target external model. 
The endpoint creation should be fairy quick for proxying external models.\n", "\n", - "This requires registering OpenAI API Key in Databricks secret manager with the following comment:\n", + "This requires registering your OpenAI API Key within the Databricks secret manager as follows:\n", "```sh\n", "# Replace `` with your scope\n", "databricks secrets create-scope \n", @@ -417,8 +415,6 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain_community.chat_models.databricks import ChatDatabricks\n", - "\n", "llm = ChatDatabricks(endpoint=\"databricks-meta-llama-3-70b-instruct\")\n", "tools = [\n", " {\n", @@ -461,7 +457,7 @@ "source": [ "## API reference\n", "\n", - "For detailed documentation of all ChatDatabricks features and configurations head to the API reference: https://api.python.langchain.com/en/latest/chat_models/langchain_community.chat_models.ChatDatabricks.html" + "For detailed documentation of all ChatDatabricks features and configurations head to the API reference: https://api.python.langchain.com/en/latest/chat_models/langchain_databricks.chat_models.ChatDatabricks.html" ] } ], diff --git a/libs/partners/databricks/.gitignore b/libs/partners/databricks/.gitignore new file mode 100644 index 0000000000000..bee8a64b79a99 --- /dev/null +++ b/libs/partners/databricks/.gitignore @@ -0,0 +1 @@ +__pycache__ diff --git a/libs/partners/databricks/LICENSE b/libs/partners/databricks/LICENSE new file mode 100644 index 0000000000000..fc0602feecdd6 --- /dev/null +++ b/libs/partners/databricks/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2024 LangChain, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/libs/partners/databricks/Makefile b/libs/partners/databricks/Makefile new file mode 100644 index 0000000000000..91babddc1619d --- /dev/null +++ b/libs/partners/databricks/Makefile @@ -0,0 +1,62 @@ +.PHONY: all format lint test tests integration_tests docker_tests help extended_tests + +# Default target executed when no arguments are given to make. +all: help + +# Define a variable for the test file path. 
+TEST_FILE ?= tests/unit_tests/ +integration_test integration_tests: TEST_FILE = tests/integration_tests/ + + +# unit tests are run with the --disable-socket flag to prevent network calls +test tests: + poetry run pytest --disable-socket --allow-unix-socket $(TEST_FILE) + +# integration tests are run without the --disable-socket flag to allow network calls +integration_test integration_tests: + poetry run pytest $(TEST_FILE) + +###################### +# LINTING AND FORMATTING +###################### + +# Define a variable for Python and notebook files. +PYTHON_FILES=. +MYPY_CACHE=.mypy_cache +lint format: PYTHON_FILES=. +lint_diff format_diff: PYTHON_FILES=$(shell git diff --relative=libs/partners/databricks --name-only --diff-filter=d master | grep -E '\.py$$|\.ipynb$$') +lint_package: PYTHON_FILES=langchain_databricks +lint_tests: PYTHON_FILES=tests +lint_tests: MYPY_CACHE=.mypy_cache_test + +lint lint_diff lint_package lint_tests: + poetry run ruff check . + poetry run ruff format $(PYTHON_FILES) --diff + poetry run ruff check --select I $(PYTHON_FILES) + mkdir -p $(MYPY_CACHE); poetry run mypy $(PYTHON_FILES) --cache-dir $(MYPY_CACHE) + +format format_diff: + poetry run ruff format $(PYTHON_FILES) + poetry run ruff check --select I --fix $(PYTHON_FILES) + +spell_check: + poetry run codespell --toml pyproject.toml + +spell_fix: + poetry run codespell --toml pyproject.toml -w + +check_imports: $(shell find langchain_databricks -name '*.py') + poetry run python ./scripts/check_imports.py $^ + +###################### +# HELP +###################### + +help: + @echo '----' + @echo 'check_imports - check imports' + @echo 'format - run code formatters' + @echo 'lint - run linters' + @echo 'test - run unit tests' + @echo 'tests - run unit tests' + @echo 'test TEST_FILE= - run all tests in file' diff --git a/libs/partners/databricks/README.md b/libs/partners/databricks/README.md new file mode 100644 index 0000000000000..acba5c7707e68 --- /dev/null +++ b/libs/partners/databricks/README.md @@ -0,0 +1,24 @@ +# langchain-databricks + +This package contains the LangChain integration with Databricks + +## Installation + +```bash +pip install -U langchain-databricks +``` + +And you should configure credentials by setting the following environment variables: + +* TODO: fill this out + +## Chat Models + +`ChatDatabricks` class exposes chat models from Databricks. + +```python +from langchain_databricks import ChatDatabricks + +llm = ChatDatabricks() +llm.invoke("Sing a ballad of LangChain.") +``` \ No newline at end of file diff --git a/libs/partners/databricks/langchain_databricks/__init__.py b/libs/partners/databricks/langchain_databricks/__init__.py new file mode 100644 index 0000000000000..3bd93de6c3ae2 --- /dev/null +++ b/libs/partners/databricks/langchain_databricks/__init__.py @@ -0,0 +1,15 @@ +from importlib import metadata + +from langchain_databricks.chat_models import ChatDatabricks + +try: + __version__ = metadata.version(__package__) +except metadata.PackageNotFoundError: + # Case where package metadata is not available. 
+ __version__ = "" +del metadata # optional, avoids polluting the results of dir(__package__) + +__all__ = [ + "ChatDatabricks", + "__version__", +] diff --git a/libs/partners/databricks/langchain_databricks/chat_models.py b/libs/partners/databricks/langchain_databricks/chat_models.py new file mode 100644 index 0000000000000..fa24c08415f6c --- /dev/null +++ b/libs/partners/databricks/langchain_databricks/chat_models.py @@ -0,0 +1,573 @@ +"""Databricks chat models.""" + +import json +import logging +from typing import ( + Any, + Callable, + Dict, + Iterator, + List, + Literal, + Mapping, + Optional, + Sequence, + Type, + Union, +) +from urllib.parse import urlparse + +from langchain_core.callbacks import CallbackManagerForLLMRun +from langchain_core.language_models import BaseChatModel +from langchain_core.language_models.base import LanguageModelInput +from langchain_core.messages import ( + AIMessage, + AIMessageChunk, + BaseMessage, + BaseMessageChunk, + ChatMessage, + ChatMessageChunk, + FunctionMessage, + HumanMessage, + HumanMessageChunk, + SystemMessage, + SystemMessageChunk, + ToolMessage, + ToolMessageChunk, +) +from langchain_core.messages.tool import tool_call_chunk +from langchain_core.output_parsers.openai_tools import ( + make_invalid_tool_call, + parse_tool_call, +) +from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult +from langchain_core.pydantic_v1 import ( + BaseModel, + Field, + PrivateAttr, +) +from langchain_core.runnables import Runnable +from langchain_core.tools import BaseTool +from langchain_core.utils.function_calling import convert_to_openai_tool + +logger = logging.getLogger(__name__) + + +class ChatDatabricks(BaseChatModel): + """Databricks chat model integration. + + Setup: + Install ``langchain-databricks``. + + .. code-block:: bash + + pip install -U langchain-databricks + + If you are outside Databricks, set the Databricks workspace hostname and personal access token to environment variables: + + .. code-block:: bash + + export DATABRICKS_HOSTNAME="https://your-databricks-workspace" + export DATABRICKS_TOKEN="your-personal-access-token" + + Key init args — completion params: + endpoint: str + Name of Databricks Model Serving endpoint to query. + target_uri: str + The target URI to use. Defaults to ``databricks``. + temperature: float + Sampling temperature. Higher values make the model more creative. + n: Optional[int] + The number of completion choices to generate. + stop: Optional[List[str]] + List of strings to stop generation at. + max_tokens: Optional[int] + Max number of tokens to generate. + extra_params: Optional[Dict[str, Any]] + Any extra parameters to pass to the endpoint. + + Instantiate: + .. code-block:: python + + from langchain_databricks import ChatDatabricks + llm = ChatDatabricks( + endpoint="databricks-meta-llama-3-1-405b-instruct", + temperature=0, + max_tokens=500, + ) + + Invoke: + .. code-block:: python + + messages = [ + ("system", "You are a helpful translator. Translate the user sentence to French."), + ("human", "I love programming."), + ] + llm.invoke(messages) + + .. code-block:: python + + AIMessage( + content="J'adore la programmation.", + response_metadata={ + 'prompt_tokens': 32, + 'completion_tokens': 9, + 'total_tokens': 41 + }, + id='run-64eebbdd-88a8-4a25-b508-21e9a5f146c5-0' + ) + + Stream: + .. code-block:: python + + for chunk in llm.stream(messages): + print(chunk) + + .. 
code-block:: python + + content='J' id='run-609b8f47-e580-4691-9ee4-e2109f53155e' + content="'" id='run-609b8f47-e580-4691-9ee4-e2109f53155e' + content='ad' id='run-609b8f47-e580-4691-9ee4-e2109f53155e' + content='ore' id='run-609b8f47-e580-4691-9ee4-e2109f53155e' + content=' la' id='run-609b8f47-e580-4691-9ee4-e2109f53155e' + content=' programm' id='run-609b8f47-e580-4691-9ee4-e2109f53155e' + content='ation' id='run-609b8f47-e580-4691-9ee4-e2109f53155e' + content='.' id='run-609b8f47-e580-4691-9ee4-e2109f53155e' + content='' response_metadata={'finish_reason': 'stop'} id='run-609b8f47-e580-4691-9ee4-e2109f53155e' + + .. code-block:: python + + stream = llm.stream(messages) + full = next(stream) + for chunk in stream: + full += chunk + full + + .. code-block:: python + + AIMessageChunk( + content="J'adore la programmation.", + response_metadata={ + 'finish_reason': 'stop' + }, + id='run-4cef851f-6223-424f-ad26-4a54e5852aa5' + ) + + Async: + .. code-block:: python + + await llm.ainvoke(messages) + + # stream: + # async for chunk in llm.astream(messages) + + # batch: + # await llm.abatch([messages]) + + .. code-block:: python + + AIMessage( + content="J'adore la programmation.", + response_metadata={ + 'prompt_tokens': 32, + 'completion_tokens': 9, + 'total_tokens': 41 + }, + id='run-e4bb043e-772b-4e1d-9f98-77ccc00c0271-0' + ) + + Tool calling: + .. code-block:: python + + from langchain_core.pydantic_v1 import BaseModel, Field + + class GetWeather(BaseModel): + '''Get the current weather in a given location''' + + location: str = Field(..., description="The city and state, e.g. San Francisco, CA") + + class GetPopulation(BaseModel): + '''Get the current population in a given location''' + + location: str = Field(..., description="The city and state, e.g. San Francisco, CA") + + llm_with_tools = llm.bind_tools([GetWeather, GetPopulation]) + ai_msg = llm_with_tools.invoke("Which city is hotter today and which is bigger: LA or NY?") + ai_msg.tool_calls + + .. code-block:: python + + [ + { + 'name': 'GetWeather', + 'args': { + 'location': 'Los Angeles, CA' + }, + 'id': 'call_ea0a6004-8e64-4ae8-a192-a40e295bfa24', + 'type': 'tool_call' + } + ] + + To use tool calls, your model endpoint must support ``tools`` parameter. See [Function calling on Databricks](https://python.langchain.com/v0.2/docs/integrations/chat/databricks/#function-calling-on-databricks) for more information. + + """ # noqa: E501 + + endpoint: str + """Name of Databricks Model Serving endpoint to query.""" + target_uri: str = "databricks" + """The target URI to use. Defaults to ``databricks``.""" + temperature: float = 0.0 + """Sampling temperature. Higher values make the model more creative.""" + n: int = 1 + """The number of completion choices to generate.""" + stop: Optional[List[str]] = None + """List of strings to stop generation at.""" + max_tokens: Optional[int] = None + """The maximum number of tokens to generate.""" + extra_params: dict = Field(default_factory=dict) + """Any extra parameters to pass to the endpoint.""" + _client: Any = PrivateAttr() + + def __init__(self, **kwargs: Any): + super().__init__(**kwargs) + self._validate_uri() + try: + from mlflow.deployments import get_deploy_client # type: ignore + + self._client = get_deploy_client(self.target_uri) + except ImportError as e: + raise ImportError( + "Failed to create the client. Please run `pip install mlflow` to " + "install required dependencies." 
+ ) from e + + def _validate_uri(self) -> None: + if self.target_uri == "databricks": + return + + if urlparse(self.target_uri).scheme != "databricks": + raise ValueError( + "Invalid target URI. The target URI must be a valid databricks URI." + ) + + @property + def _default_params(self) -> Dict[str, Any]: + params: Dict[str, Any] = { + "target_uri": self.target_uri, + "endpoint": self.endpoint, + "temperature": self.temperature, + "n": self.n, + "stop": self.stop, + "max_tokens": self.max_tokens, + "extra_params": self.extra_params, + } + return params + + def _generate( + self, + messages: List[BaseMessage], + stop: Optional[List[str]] = None, + run_manager: Optional[CallbackManagerForLLMRun] = None, + **kwargs: Any, + ) -> ChatResult: + data = self._prepare_inputs(messages, stop, **kwargs) + resp = self._client.predict(endpoint=self.endpoint, inputs=data) + return self._convert_response_to_chat_result(resp) + + def _prepare_inputs( + self, + messages: List[BaseMessage], + stop: Optional[List[str]] = None, + **kwargs: Any, + ) -> Dict[str, Any]: + data: Dict[str, Any] = { + "messages": [_convert_message_to_dict(msg) for msg in messages], + "temperature": self.temperature, + "n": self.n, + **self.extra_params, + **kwargs, + } + if stop := self.stop or stop: + data["stop"] = stop + if self.max_tokens is not None: + data["max_tokens"] = self.max_tokens + + return data + + def _convert_response_to_chat_result( + self, response: Mapping[str, Any] + ) -> ChatResult: + generations = [ + ChatGeneration( + message=_convert_dict_to_message(choice["message"]), + generation_info=choice.get("usage", {}), + ) + for choice in response["choices"] + ] + usage = response.get("usage", {}) + return ChatResult(generations=generations, llm_output=usage) + + def _stream( + self, + messages: List[BaseMessage], + stop: Optional[List[str]] = None, + run_manager: Optional[CallbackManagerForLLMRun] = None, + **kwargs: Any, + ) -> Iterator[ChatGenerationChunk]: + data = self._prepare_inputs(messages, stop, **kwargs) + first_chunk_role = None + for chunk in self._client.predict_stream(endpoint=self.endpoint, inputs=data): + if chunk["choices"]: + choice = chunk["choices"][0] + + chunk_delta = choice["delta"] + if first_chunk_role is None: + first_chunk_role = chunk_delta.get("role") + + chunk_message = _convert_dict_to_message_chunk( + chunk_delta, first_chunk_role + ) + + generation_info = {} + if finish_reason := choice.get("finish_reason"): + generation_info["finish_reason"] = finish_reason + if logprobs := choice.get("logprobs"): + generation_info["logprobs"] = logprobs + + chunk = ChatGenerationChunk( + message=chunk_message, generation_info=generation_info or None + ) + + if run_manager: + run_manager.on_llm_new_token( + chunk.text, chunk=chunk, logprobs=logprobs + ) + + yield chunk + else: + # Handle the case where choices are empty if needed + continue + + def bind_tools( + self, + tools: Sequence[Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool]], + *, + tool_choice: Optional[ + Union[dict, str, Literal["auto", "none", "required", "any"], bool] + ] = None, + **kwargs: Any, + ) -> Runnable[LanguageModelInput, BaseMessage]: + """Bind tool-like objects to this chat model. + + Assumes model is compatible with OpenAI tool-calling API. + + Args: + tools: A list of tool definitions to bind to this chat model. + Can be a dictionary, pydantic model, callable, or BaseTool. Pydantic + models, callables, and BaseTools will be automatically converted to + their schema dictionary representation. 
+ tool_choice: Which tool to require the model to call. + Options are: + name of the tool (str): calls corresponding tool; + "auto": automatically selects a tool (including no tool); + "none": model does not generate any tool calls and instead must + generate a standard assistant message; + "required": the model picks the most relevant tool in tools and + must generate a tool call; + + or a dict of the form: + {"type": "function", "function": {"name": <>}}. + **kwargs: Any additional parameters to pass to the + :class:`~langchain.runnable.Runnable` constructor. + """ + formatted_tools = [convert_to_openai_tool(tool) for tool in tools] + if tool_choice: + if isinstance(tool_choice, str): + # tool_choice is a tool/function name + if tool_choice not in ("auto", "none", "required"): + tool_choice = { + "type": "function", + "function": {"name": tool_choice}, + } + elif isinstance(tool_choice, dict): + tool_names = [ + formatted_tool["function"]["name"] + for formatted_tool in formatted_tools + ] + if not any( + tool_name == tool_choice["function"]["name"] + for tool_name in tool_names + ): + raise ValueError( + f"Tool choice {tool_choice} was specified, but the only " + f"provided tools were {tool_names}." + ) + else: + raise ValueError( + f"Unrecognized tool_choice type. Expected str, bool or dict. " + f"Received: {tool_choice}" + ) + kwargs["tool_choice"] = tool_choice + return super().bind(tools=formatted_tools, **kwargs) + + @property + def _llm_type(self) -> str: + """Return type of chat model.""" + return "chat-databricks" + + +### Conversion function to convert Pydantic models to dictionaries and vice versa. ### + + +def _convert_message_to_dict(message: BaseMessage) -> dict: + message_dict = {"content": message.content} + + # OpenAI supports "name" field in messages. + if (name := message.name or message.additional_kwargs.get("name")) is not None: + message_dict["name"] = name + + if id := message.id: + message_dict["id"] = id + + if isinstance(message, ChatMessage): + return {"role": message.role, **message_dict} + elif isinstance(message, HumanMessage): + return {"role": "user", **message_dict} + elif isinstance(message, AIMessage): + if tool_calls := _get_tool_calls_from_ai_message(message): + message_dict["tool_calls"] = tool_calls # type: ignore[assignment] + # If tool calls present, content null value should be None not empty string. + message_dict["content"] = message_dict["content"] or None # type: ignore[assignment] + return {"role": "assistant", **message_dict} + elif isinstance(message, SystemMessage): + return {"role": "system", **message_dict} + elif isinstance(message, ToolMessage): + return { + "role": "tool", + "tool_call_id": message.tool_call_id, + **message_dict, + } + elif ( + isinstance(message, FunctionMessage) + or "function_call" in message.additional_kwargs + ): + raise ValueError( + "Function messages are not supported by Databricks. Please" + " create a feature request at https://github.com/mlflow/mlflow/issues." 
+ ) + else: + raise ValueError(f"Got unknown message type: {type(message)}") + + +def _get_tool_calls_from_ai_message(message: AIMessage) -> List[Dict]: + tool_calls = [ + { + "type": "function", + "id": tc["id"], + "function": { + "name": tc["name"], + "arguments": json.dumps(tc["args"]), + }, + } + for tc in message.tool_calls + ] + + invalid_tool_calls = [ + { + "type": "function", + "id": tc["id"], + "function": { + "name": tc["name"], + "arguments": tc["args"], + }, + } + for tc in message.invalid_tool_calls + ] + + if tool_calls or invalid_tool_calls: + return tool_calls + invalid_tool_calls + + # Get tool calls from additional kwargs if present. + return [ + { + k: v + for k, v in tool_call.items() # type: ignore[union-attr] + if k in {"id", "type", "function"} + } + for tool_call in message.additional_kwargs.get("tool_calls", []) + ] + + +def _convert_dict_to_message(_dict: Dict) -> BaseMessage: + role = _dict["role"] + content = _dict.get("content") + content = content if content is not None else "" + + if role == "user": + return HumanMessage(content=content) + elif role == "system": + return SystemMessage(content=content) + elif role == "assistant": + additional_kwargs: Dict = {} + tool_calls = [] + invalid_tool_calls = [] + if raw_tool_calls := _dict.get("tool_calls"): + additional_kwargs["tool_calls"] = raw_tool_calls + for raw_tool_call in raw_tool_calls: + try: + tool_calls.append(parse_tool_call(raw_tool_call, return_id=True)) + except Exception as e: + invalid_tool_calls.append( + make_invalid_tool_call(raw_tool_call, str(e)) + ) + return AIMessage( + content=content, + additional_kwargs=additional_kwargs, + id=_dict.get("id"), + tool_calls=tool_calls, + invalid_tool_calls=invalid_tool_calls, + ) + else: + return ChatMessage(content=content, role=role) + + +def _convert_dict_to_message_chunk( + _dict: Mapping[str, Any], default_role: str +) -> BaseMessageChunk: + role = _dict.get("role", default_role) + content = _dict.get("content") + content = content if content is not None else "" + + if role == "user": + return HumanMessageChunk(content=content) + elif role == "system": + return SystemMessageChunk(content=content) + elif role == "tool": + return ToolMessageChunk( + content=content, tool_call_id=_dict["tool_call_id"], id=_dict.get("id") + ) + elif role == "assistant": + additional_kwargs: Dict = {} + tool_call_chunks = [] + if raw_tool_calls := _dict.get("tool_calls"): + additional_kwargs["tool_calls"] = raw_tool_calls + try: + tool_call_chunks = [ + tool_call_chunk( + name=tc["function"].get("name"), + args=tc["function"].get("arguments"), + id=tc.get("id"), + index=tc["index"], + ) + for tc in raw_tool_calls + ] + except KeyError: + pass + return AIMessageChunk( + content=content, + additional_kwargs=additional_kwargs, + id=_dict.get("id"), + tool_call_chunks=tool_call_chunks, + ) + else: + return ChatMessageChunk(content=content, role=role) diff --git a/libs/partners/databricks/langchain_databricks/py.typed b/libs/partners/databricks/langchain_databricks/py.typed new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/libs/partners/databricks/poetry.lock b/libs/partners/databricks/poetry.lock new file mode 100644 index 0000000000000..866aaa1a2f6bb --- /dev/null +++ b/libs/partners/databricks/poetry.lock @@ -0,0 +1,2495 @@ +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. + +[[package]] +name = "alembic" +version = "1.13.2" +description = "A database migration tool for SQLAlchemy." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "alembic-1.13.2-py3-none-any.whl", hash = "sha256:6b8733129a6224a9a711e17c99b08462dbf7cc9670ba8f2e2ae9af860ceb1953"}, + {file = "alembic-1.13.2.tar.gz", hash = "sha256:1ff0ae32975f4fd96028c39ed9bb3c867fe3af956bd7bb37343b54c9fe7445ef"}, +] + +[package.dependencies] +importlib-metadata = {version = "*", markers = "python_version < \"3.9\""} +importlib-resources = {version = "*", markers = "python_version < \"3.9\""} +Mako = "*" +SQLAlchemy = ">=1.3.0" +typing-extensions = ">=4" + +[package.extras] +tz = ["backports.zoneinfo"] + +[[package]] +name = "aniso8601" +version = "9.0.1" +description = "A library for parsing ISO 8601 strings." +optional = false +python-versions = "*" +files = [ + {file = "aniso8601-9.0.1-py2.py3-none-any.whl", hash = "sha256:1d2b7ef82963909e93c4f24ce48d4de9e66009a21bf1c1e1c85bdd0812fe412f"}, + {file = "aniso8601-9.0.1.tar.gz", hash = "sha256:72e3117667eedf66951bb2d93f4296a56b94b078a8a95905a052611fb3f1b973"}, +] + +[package.extras] +dev = ["black", "coverage", "isort", "pre-commit", "pyenchant", "pylint"] + +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""} + +[[package]] +name = "blinker" +version = "1.8.2" +description = "Fast, simple object-to-object and broadcast signaling" +optional = false +python-versions = ">=3.8" +files = [ + {file = "blinker-1.8.2-py3-none-any.whl", hash = "sha256:1779309f71bf239144b9399d06ae925637cf6634cf6bd131104184531bf67c01"}, + {file = "blinker-1.8.2.tar.gz", hash = "sha256:8f77b09d3bf7c795e969e9486f39c2c5e9c39d4ee07424be2bc594ece9642d83"}, +] + +[[package]] +name = "cachetools" +version = "5.4.0" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.4.0-py3-none-any.whl", hash = "sha256:3ae3b49a3d5e28a77a0be2b37dbcb89005058959cb2323858c2657c4a8cab474"}, + {file = "cachetools-5.4.0.tar.gz", hash = "sha256:b8adc2e7c07f105ced7bc56dbb6dfbe7c4a00acce20e2227b3f355be89bc6827"}, +] + +[[package]] +name = "certifi" +version = "2024.7.4" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, + {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "cloudpickle" +version = "3.0.0" +description = "Pickler class to extend the standard pickle.Pickler functionality" +optional = false +python-versions = ">=3.8" +files = [ + {file = "cloudpickle-3.0.0-py3-none-any.whl", hash = "sha256:246ee7d0c295602a036e86369c77fecda4ab17b506496730f2f576d9016fd9c7"}, + {file = "cloudpickle-3.0.0.tar.gz", hash = "sha256:996d9a482c6fb4f33c1a35335cf8afd065d2a56e973270364840712d9131a882"}, +] + +[[package]] +name = "codespell" +version = "2.3.0" +description = "Codespell" +optional = false +python-versions = ">=3.8" +files = [ + {file = "codespell-2.3.0-py3-none-any.whl", hash = "sha256:a9c7cef2501c9cfede2110fd6d4e5e62296920efe9abfb84648df866e47f58d1"}, + {file = "codespell-2.3.0.tar.gz", hash = "sha256:360c7d10f75e65f67bad720af7007e1060a5d395670ec11a7ed1fed9dd17471f"}, +] + +[package.extras] +dev = ["Pygments", "build", "chardet", "pre-commit", "pytest", "pytest-cov", "pytest-dependency", "ruff", "tomli", "twine"] +hard-encoding-detection = ["chardet"] +toml = ["tomli"] +types = ["chardet (>=5.1.0)", "mypy", "pytest", "pytest-cov", "pytest-dependency"] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "contourpy" +version = "1.1.1" +description = "Python library for calculating contours of 2D quadrilateral grids" +optional = false +python-versions = ">=3.8" +files = [ + {file = "contourpy-1.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:46e24f5412c948d81736509377e255f6040e94216bf1a9b5ea1eaa9d29f6ec1b"}, + {file = "contourpy-1.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e48694d6a9c5a26ee85b10130c77a011a4fedf50a7279fa0bdaf44bafb4299d"}, + {file = "contourpy-1.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a66045af6cf00e19d02191ab578a50cb93b2028c3eefed999793698e9ea768ae"}, + {file = "contourpy-1.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4ebf42695f75ee1a952f98ce9775c873e4971732a87334b099dde90b6af6a916"}, + {file = "contourpy-1.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6aec19457617ef468ff091669cca01fa7ea557b12b59a7908b9474bb9674cf0"}, + {file = "contourpy-1.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:462c59914dc6d81e0b11f37e560b8a7c2dbab6aca4f38be31519d442d6cde1a1"}, + {file = "contourpy-1.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6d0a8efc258659edc5299f9ef32d8d81de8b53b45d67bf4bfa3067f31366764d"}, + {file = "contourpy-1.1.1-cp310-cp310-win32.whl", hash = "sha256:d6ab42f223e58b7dac1bb0af32194a7b9311065583cc75ff59dcf301afd8a431"}, + {file = "contourpy-1.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:549174b0713d49871c6dee90a4b499d3f12f5e5f69641cd23c50a4542e2ca1eb"}, + {file = "contourpy-1.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:407d864db716a067cc696d61fa1ef6637fedf03606e8417fe2aeed20a061e6b2"}, + {file = "contourpy-1.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfe80c017973e6a4c367e037cb31601044dd55e6bfacd57370674867d15a899b"}, + {file = "contourpy-1.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e30aaf2b8a2bac57eb7e1650df1b3a4130e8d0c66fc2f861039d507a11760e1b"}, + {file = "contourpy-1.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3de23ca4f381c3770dee6d10ead6fff524d540c0f662e763ad1530bde5112532"}, + {file = "contourpy-1.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:566f0e41df06dfef2431defcfaa155f0acfa1ca4acbf8fd80895b1e7e2ada40e"}, + {file = "contourpy-1.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b04c2f0adaf255bf756cf08ebef1be132d3c7a06fe6f9877d55640c5e60c72c5"}, + {file = "contourpy-1.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d0c188ae66b772d9d61d43c6030500344c13e3f73a00d1dc241da896f379bb62"}, + {file = "contourpy-1.1.1-cp311-cp311-win32.whl", hash = "sha256:0683e1ae20dc038075d92e0e0148f09ffcefab120e57f6b4c9c0f477ec171f33"}, + {file = "contourpy-1.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:8636cd2fc5da0fb102a2504fa2c4bea3cbc149533b345d72cdf0e7a924decc45"}, + {file = "contourpy-1.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:560f1d68a33e89c62da5da4077ba98137a5e4d3a271b29f2f195d0fba2adcb6a"}, + {file = 
"contourpy-1.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:24216552104ae8f3b34120ef84825400b16eb6133af2e27a190fdc13529f023e"}, + {file = "contourpy-1.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56de98a2fb23025882a18b60c7f0ea2d2d70bbbcfcf878f9067234b1c4818442"}, + {file = "contourpy-1.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:07d6f11dfaf80a84c97f1a5ba50d129d9303c5b4206f776e94037332e298dda8"}, + {file = "contourpy-1.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1eaac5257a8f8a047248d60e8f9315c6cff58f7803971170d952555ef6344a7"}, + {file = "contourpy-1.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19557fa407e70f20bfaba7d55b4d97b14f9480856c4fb65812e8a05fe1c6f9bf"}, + {file = "contourpy-1.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:081f3c0880712e40effc5f4c3b08feca6d064cb8cfbb372ca548105b86fd6c3d"}, + {file = "contourpy-1.1.1-cp312-cp312-win32.whl", hash = "sha256:059c3d2a94b930f4dafe8105bcdc1b21de99b30b51b5bce74c753686de858cb6"}, + {file = "contourpy-1.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:f44d78b61740e4e8c71db1cf1fd56d9050a4747681c59ec1094750a658ceb970"}, + {file = "contourpy-1.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:70e5a10f8093d228bb2b552beeb318b8928b8a94763ef03b858ef3612b29395d"}, + {file = "contourpy-1.1.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8394e652925a18ef0091115e3cc191fef350ab6dc3cc417f06da66bf98071ae9"}, + {file = "contourpy-1.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5bd5680f844c3ff0008523a71949a3ff5e4953eb7701b28760805bc9bcff217"}, + {file = "contourpy-1.1.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:66544f853bfa85c0d07a68f6c648b2ec81dafd30f272565c37ab47a33b220684"}, + {file = "contourpy-1.1.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0c02b75acfea5cab07585d25069207e478d12309557f90a61b5a3b4f77f46ce"}, + {file = "contourpy-1.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41339b24471c58dc1499e56783fedc1afa4bb018bcd035cfb0ee2ad2a7501ef8"}, + {file = "contourpy-1.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f29fb0b3f1217dfe9362ec55440d0743fe868497359f2cf93293f4b2701b8251"}, + {file = "contourpy-1.1.1-cp38-cp38-win32.whl", hash = "sha256:f9dc7f933975367251c1b34da882c4f0e0b2e24bb35dc906d2f598a40b72bfc7"}, + {file = "contourpy-1.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:498e53573e8b94b1caeb9e62d7c2d053c263ebb6aa259c81050766beb50ff8d9"}, + {file = "contourpy-1.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ba42e3810999a0ddd0439e6e5dbf6d034055cdc72b7c5c839f37a7c274cb4eba"}, + {file = "contourpy-1.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6c06e4c6e234fcc65435223c7b2a90f286b7f1b2733058bdf1345d218cc59e34"}, + {file = "contourpy-1.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca6fab080484e419528e98624fb5c4282148b847e3602dc8dbe0cb0669469887"}, + {file = "contourpy-1.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93df44ab351119d14cd1e6b52a5063d3336f0754b72736cc63db59307dabb718"}, + {file = "contourpy-1.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eafbef886566dc1047d7b3d4b14db0d5b7deb99638d8e1be4e23a7c7ac59ff0f"}, + {file = "contourpy-1.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:efe0fab26d598e1ec07d72cf03eaeeba8e42b4ecf6b9ccb5a356fde60ff08b85"}, + {file = "contourpy-1.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f08e469821a5e4751c97fcd34bcb586bc243c39c2e39321822060ba902eac49e"}, + {file = "contourpy-1.1.1-cp39-cp39-win32.whl", hash = "sha256:bfc8a5e9238232a45ebc5cb3bfee71f1167064c8d382cadd6076f0d51cff1da0"}, + {file = "contourpy-1.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:c84fdf3da00c2827d634de4fcf17e3e067490c4aea82833625c4c8e6cdea0887"}, + {file = "contourpy-1.1.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:229a25f68046c5cf8067d6d6351c8b99e40da11b04d8416bf8d2b1d75922521e"}, + {file = "contourpy-1.1.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a10dab5ea1bd4401c9483450b5b0ba5416be799bbd50fc7a6cc5e2a15e03e8a3"}, + {file = "contourpy-1.1.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:4f9147051cb8fdb29a51dc2482d792b3b23e50f8f57e3720ca2e3d438b7adf23"}, + {file = "contourpy-1.1.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a75cc163a5f4531a256f2c523bd80db509a49fc23721b36dd1ef2f60ff41c3cb"}, + {file = "contourpy-1.1.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b53d5769aa1f2d4ea407c65f2d1d08002952fac1d9e9d307aa2e1023554a163"}, + {file = "contourpy-1.1.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:11b836b7dbfb74e049c302bbf74b4b8f6cb9d0b6ca1bf86cfa8ba144aedadd9c"}, + {file = "contourpy-1.1.1.tar.gz", hash = "sha256:96ba37c2e24b7212a77da85004c38e7c4d155d3e72a45eeaf22c1f03f607e8ab"}, +] + +[package.dependencies] +numpy = {version = ">=1.16,<2.0", markers = "python_version <= \"3.11\""} + +[package.extras] +bokeh = ["bokeh", "selenium"] +docs = ["furo", "sphinx (>=7.2)", "sphinx-copybutton"] +mypy = ["contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.4.1)", "types-Pillow"] +test = ["Pillow", "contourpy[test-no-images]", "matplotlib"] +test-no-images = ["pytest", "pytest-cov", "wurlitzer"] + +[[package]] +name = "cycler" +version = "0.12.1" +description = "Composable style cycles" +optional = false +python-versions = ">=3.8" +files = [ + {file = "cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30"}, + {file = "cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c"}, +] + +[package.extras] +docs = ["ipython", "matplotlib", "numpydoc", "sphinx"] +tests = ["pytest", "pytest-cov", "pytest-xdist"] + +[[package]] +name = "databricks-sdk" +version = "0.30.0" +description = "Databricks SDK for Python (Beta)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "databricks_sdk-0.30.0-py3-none-any.whl", hash = "sha256:c3f954c73cdd703815acfa31a2e8442ee85aa3ca3ba8d52f05e85ebce29233d7"}, + {file = "databricks_sdk-0.30.0.tar.gz", hash = "sha256:37c7a12939da09dbdcb6ceba4fcad5f484a63508366225f797429ae4ee557b21"}, +] + +[package.dependencies] +google-auth = ">=2.0,<3.0" +requests = ">=2.28.1,<3" + +[package.extras] +dev = ["autoflake", "databricks-connect", "ipython", "ipywidgets", "isort", "pycodestyle", "pyfakefs", "pytest", "pytest-cov", "pytest-mock", "pytest-rerunfailures", "pytest-xdist", "requests-mock", "wheel", "yapf"] +notebook = ["ipython (>=8,<9)", "ipywidgets (>=8,<9)"] + +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + +[[package]] +name = "docker" +version = "7.1.0" +description = "A Python library for the Docker Engine API." +optional = false +python-versions = ">=3.8" +files = [ + {file = "docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0"}, + {file = "docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c"}, +] + +[package.dependencies] +pywin32 = {version = ">=304", markers = "sys_platform == \"win32\""} +requests = ">=2.26.0" +urllib3 = ">=1.26.0" + +[package.extras] +dev = ["coverage (==7.2.7)", "pytest (==7.4.2)", "pytest-cov (==4.1.0)", "pytest-timeout (==2.1.0)", "ruff (==0.1.8)"] +docs = ["myst-parser (==0.18.0)", "sphinx (==5.1.1)"] +ssh = ["paramiko (>=2.4.3)"] +websockets = ["websocket-client (>=1.3.0)"] + +[[package]] +name = "entrypoints" +version = "0.4" +description = "Discover and load entry points from installed packages." +optional = false +python-versions = ">=3.6" +files = [ + {file = "entrypoints-0.4-py3-none-any.whl", hash = "sha256:f174b5ff827504fd3cd97cc3f8649f3693f51538c7e4bdf3ef002c8429d42f9f"}, + {file = "entrypoints-0.4.tar.gz", hash = "sha256:b706eddaa9218a19ebcd67b56818f05bb27589b1ca9e8d797b74affad4ccacd4"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.2" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "flask" +version = "3.0.3" +description = "A simple framework for building complex web applications." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "flask-3.0.3-py3-none-any.whl", hash = "sha256:34e815dfaa43340d1d15a5c3a02b8476004037eb4840b34910c6e21679d288f3"}, + {file = "flask-3.0.3.tar.gz", hash = "sha256:ceb27b0af3823ea2737928a4d99d125a06175b8512c445cbd9a9ce200ef76842"}, +] + +[package.dependencies] +blinker = ">=1.6.2" +click = ">=8.1.3" +importlib-metadata = {version = ">=3.6.0", markers = "python_version < \"3.10\""} +itsdangerous = ">=2.1.2" +Jinja2 = ">=3.1.2" +Werkzeug = ">=3.0.0" + +[package.extras] +async = ["asgiref (>=3.2)"] +dotenv = ["python-dotenv"] + +[[package]] +name = "fonttools" +version = "4.53.1" +description = "Tools to manipulate font files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fonttools-4.53.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0679a30b59d74b6242909945429dbddb08496935b82f91ea9bf6ad240ec23397"}, + {file = "fonttools-4.53.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e8bf06b94694251861ba7fdeea15c8ec0967f84c3d4143ae9daf42bbc7717fe3"}, + {file = "fonttools-4.53.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b96cd370a61f4d083c9c0053bf634279b094308d52fdc2dd9a22d8372fdd590d"}, + {file = "fonttools-4.53.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1c7c5aa18dd3b17995898b4a9b5929d69ef6ae2af5b96d585ff4005033d82f0"}, + {file = "fonttools-4.53.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e013aae589c1c12505da64a7d8d023e584987e51e62006e1bb30d72f26522c41"}, + {file = "fonttools-4.53.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9efd176f874cb6402e607e4cc9b4a9cd584d82fc34a4b0c811970b32ba62501f"}, + {file = "fonttools-4.53.1-cp310-cp310-win32.whl", hash = "sha256:c8696544c964500aa9439efb6761947393b70b17ef4e82d73277413f291260a4"}, + {file = "fonttools-4.53.1-cp310-cp310-win_amd64.whl", hash = "sha256:8959a59de5af6d2bec27489e98ef25a397cfa1774b375d5787509c06659b3671"}, + {file = "fonttools-4.53.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:da33440b1413bad53a8674393c5d29ce64d8c1a15ef8a77c642ffd900d07bfe1"}, + {file = "fonttools-4.53.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ff7e5e9bad94e3a70c5cd2fa27f20b9bb9385e10cddab567b85ce5d306ea923"}, + {file = "fonttools-4.53.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6e7170d675d12eac12ad1a981d90f118c06cf680b42a2d74c6c931e54b50719"}, + {file = "fonttools-4.53.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bee32ea8765e859670c4447b0817514ca79054463b6b79784b08a8df3a4d78e3"}, + {file = "fonttools-4.53.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6e08f572625a1ee682115223eabebc4c6a2035a6917eac6f60350aba297ccadb"}, + {file = "fonttools-4.53.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b21952c092ffd827504de7e66b62aba26fdb5f9d1e435c52477e6486e9d128b2"}, + {file = "fonttools-4.53.1-cp311-cp311-win32.whl", hash = "sha256:9dfdae43b7996af46ff9da520998a32b105c7f098aeea06b2226b30e74fbba88"}, + {file = "fonttools-4.53.1-cp311-cp311-win_amd64.whl", hash = "sha256:d4d0096cb1ac7a77b3b41cd78c9b6bc4a400550e21dc7a92f2b5ab53ed74eb02"}, + {file = "fonttools-4.53.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d92d3c2a1b39631a6131c2fa25b5406855f97969b068e7e08413325bc0afba58"}, + {file = "fonttools-4.53.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3b3c8ebafbee8d9002bd8f1195d09ed2bd9ff134ddec37ee8f6a6375e6a4f0e8"}, + {file = 
"fonttools-4.53.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32f029c095ad66c425b0ee85553d0dc326d45d7059dbc227330fc29b43e8ba60"}, + {file = "fonttools-4.53.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10f5e6c3510b79ea27bb1ebfcc67048cde9ec67afa87c7dd7efa5c700491ac7f"}, + {file = "fonttools-4.53.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f677ce218976496a587ab17140da141557beb91d2a5c1a14212c994093f2eae2"}, + {file = "fonttools-4.53.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9e6ceba2a01b448e36754983d376064730690401da1dd104ddb543519470a15f"}, + {file = "fonttools-4.53.1-cp312-cp312-win32.whl", hash = "sha256:791b31ebbc05197d7aa096bbc7bd76d591f05905d2fd908bf103af4488e60670"}, + {file = "fonttools-4.53.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ed170b5e17da0264b9f6fae86073be3db15fa1bd74061c8331022bca6d09bab"}, + {file = "fonttools-4.53.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c818c058404eb2bba05e728d38049438afd649e3c409796723dfc17cd3f08749"}, + {file = "fonttools-4.53.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:651390c3b26b0c7d1f4407cad281ee7a5a85a31a110cbac5269de72a51551ba2"}, + {file = "fonttools-4.53.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e54f1bba2f655924c1138bbc7fa91abd61f45c68bd65ab5ed985942712864bbb"}, + {file = "fonttools-4.53.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9cd19cf4fe0595ebdd1d4915882b9440c3a6d30b008f3cc7587c1da7b95be5f"}, + {file = "fonttools-4.53.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2af40ae9cdcb204fc1d8f26b190aa16534fcd4f0df756268df674a270eab575d"}, + {file = "fonttools-4.53.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:35250099b0cfb32d799fb5d6c651220a642fe2e3c7d2560490e6f1d3f9ae9169"}, + {file = "fonttools-4.53.1-cp38-cp38-win32.whl", hash = "sha256:f08df60fbd8d289152079a65da4e66a447efc1d5d5a4d3f299cdd39e3b2e4a7d"}, + {file = "fonttools-4.53.1-cp38-cp38-win_amd64.whl", hash = "sha256:7b6b35e52ddc8fb0db562133894e6ef5b4e54e1283dff606fda3eed938c36fc8"}, + {file = "fonttools-4.53.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:75a157d8d26c06e64ace9df037ee93a4938a4606a38cb7ffaf6635e60e253b7a"}, + {file = "fonttools-4.53.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4824c198f714ab5559c5be10fd1adf876712aa7989882a4ec887bf1ef3e00e31"}, + {file = "fonttools-4.53.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:becc5d7cb89c7b7afa8321b6bb3dbee0eec2b57855c90b3e9bf5fb816671fa7c"}, + {file = "fonttools-4.53.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84ec3fb43befb54be490147b4a922b5314e16372a643004f182babee9f9c3407"}, + {file = "fonttools-4.53.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:73379d3ffdeecb376640cd8ed03e9d2d0e568c9d1a4e9b16504a834ebadc2dfb"}, + {file = "fonttools-4.53.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:02569e9a810f9d11f4ae82c391ebc6fb5730d95a0657d24d754ed7763fb2d122"}, + {file = "fonttools-4.53.1-cp39-cp39-win32.whl", hash = "sha256:aae7bd54187e8bf7fd69f8ab87b2885253d3575163ad4d669a262fe97f0136cb"}, + {file = "fonttools-4.53.1-cp39-cp39-win_amd64.whl", hash = "sha256:e5b708073ea3d684235648786f5f6153a48dc8762cdfe5563c57e80787c29fbb"}, + {file = "fonttools-4.53.1-py3-none-any.whl", hash = "sha256:f1f8758a2ad110bd6432203a344269f445a2907dc24ef6bccfd0ac4e14e0d71d"}, + {file = "fonttools-4.53.1.tar.gz", hash = 
"sha256:e128778a8e9bc11159ce5447f76766cefbd876f44bd79aff030287254e4752c4"}, +] + +[package.extras] +all = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "fs (>=2.2.0,<3)", "lxml (>=4.0)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres", "pycairo", "scipy", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0)", "xattr", "zopfli (>=0.1.4)"] +graphite = ["lz4 (>=1.7.4.2)"] +interpolatable = ["munkres", "pycairo", "scipy"] +lxml = ["lxml (>=4.0)"] +pathops = ["skia-pathops (>=0.5.0)"] +plot = ["matplotlib"] +repacker = ["uharfbuzz (>=0.23.0)"] +symfont = ["sympy"] +type1 = ["xattr"] +ufo = ["fs (>=2.2.0,<3)"] +unicode = ["unicodedata2 (>=15.1.0)"] +woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"] + +[[package]] +name = "gitdb" +version = "4.0.11" +description = "Git Object Database" +optional = false +python-versions = ">=3.7" +files = [ + {file = "gitdb-4.0.11-py3-none-any.whl", hash = "sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4"}, + {file = "gitdb-4.0.11.tar.gz", hash = "sha256:bf5421126136d6d0af55bc1e7c1af1c397a34f5b7bd79e776cd3e89785c2b04b"}, +] + +[package.dependencies] +smmap = ">=3.0.1,<6" + +[[package]] +name = "gitpython" +version = "3.1.43" +description = "GitPython is a Python library used to interact with Git repositories" +optional = false +python-versions = ">=3.7" +files = [ + {file = "GitPython-3.1.43-py3-none-any.whl", hash = "sha256:eec7ec56b92aad751f9912a73404bc02ba212a23adb2c7098ee668417051a1ff"}, + {file = "GitPython-3.1.43.tar.gz", hash = "sha256:35f314a9f878467f5453cc1fee295c3e18e52f1b99f10f6cf5b1682e968a9e7c"}, +] + +[package.dependencies] +gitdb = ">=4.0.1,<5" + +[package.extras] +doc = ["sphinx (==4.3.2)", "sphinx-autodoc-typehints", "sphinx-rtd-theme", "sphinxcontrib-applehelp (>=1.0.2,<=1.0.4)", "sphinxcontrib-devhelp (==1.0.2)", "sphinxcontrib-htmlhelp (>=2.0.0,<=2.0.1)", "sphinxcontrib-qthelp (==1.0.3)", "sphinxcontrib-serializinghtml (==1.1.5)"] +test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "typing-extensions"] + +[[package]] +name = "google-auth" +version = "2.33.0" +description = "Google Authentication Library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google_auth-2.33.0-py2.py3-none-any.whl", hash = "sha256:8eff47d0d4a34ab6265c50a106a3362de6a9975bb08998700e389f857e4d39df"}, + {file = "google_auth-2.33.0.tar.gz", hash = "sha256:d6a52342160d7290e334b4d47ba390767e4438ad0d45b7630774533e82655b95"}, +] + +[package.dependencies] +cachetools = ">=2.0.0,<6.0" +pyasn1-modules = ">=0.2.1" +rsa = ">=3.1.4,<5" + +[package.extras] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] +enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] +pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] +reauth = ["pyu2f (>=0.1.5)"] +requests = ["requests (>=2.20.0,<3.0.0.dev0)"] + +[[package]] +name = "graphene" +version = "3.3" +description = "GraphQL Framework for Python" +optional = false +python-versions = "*" +files = [ + {file = "graphene-3.3-py2.py3-none-any.whl", hash = "sha256:bb3810be33b54cb3e6969506671eb72319e8d7ba0d5ca9c8066472f75bf35a38"}, + {file = "graphene-3.3.tar.gz", hash = "sha256:529bf40c2a698954217d3713c6041d69d3f719ad0080857d7ee31327112446b0"}, +] + +[package.dependencies] +aniso8601 = ">=8,<10" +graphql-core = ">=3.1,<3.3" +graphql-relay = ">=3.1,<3.3" + +[package.extras] +dev = ["black 
(==22.3.0)", "coveralls (>=3.3,<4)", "flake8 (>=4,<5)", "iso8601 (>=1,<2)", "mock (>=4,<5)", "pytest (>=6,<7)", "pytest-asyncio (>=0.16,<2)", "pytest-benchmark (>=3.4,<4)", "pytest-cov (>=3,<4)", "pytest-mock (>=3,<4)", "pytz (==2022.1)", "snapshottest (>=0.6,<1)"] +test = ["coveralls (>=3.3,<4)", "iso8601 (>=1,<2)", "mock (>=4,<5)", "pytest (>=6,<7)", "pytest-asyncio (>=0.16,<2)", "pytest-benchmark (>=3.4,<4)", "pytest-cov (>=3,<4)", "pytest-mock (>=3,<4)", "pytz (==2022.1)", "snapshottest (>=0.6,<1)"] + +[[package]] +name = "graphql-core" +version = "3.2.3" +description = "GraphQL implementation for Python, a port of GraphQL.js, the JavaScript reference implementation for GraphQL." +optional = false +python-versions = ">=3.6,<4" +files = [ + {file = "graphql-core-3.2.3.tar.gz", hash = "sha256:06d2aad0ac723e35b1cb47885d3e5c45e956a53bc1b209a9fc5369007fe46676"}, + {file = "graphql_core-3.2.3-py3-none-any.whl", hash = "sha256:5766780452bd5ec8ba133f8bf287dc92713e3868ddd83aee4faab9fc3e303dc3"}, +] + +[[package]] +name = "graphql-relay" +version = "3.2.0" +description = "Relay library for graphql-core" +optional = false +python-versions = ">=3.6,<4" +files = [ + {file = "graphql-relay-3.2.0.tar.gz", hash = "sha256:1ff1c51298356e481a0be009ccdff249832ce53f30559c1338f22a0e0d17250c"}, + {file = "graphql_relay-3.2.0-py3-none-any.whl", hash = "sha256:c9b22bd28b170ba1fe674c74384a8ff30a76c8e26f88ac3aa1584dd3179953e5"}, +] + +[package.dependencies] +graphql-core = ">=3.2,<3.3" + +[[package]] +name = "greenlet" +version = "3.0.3" +description = "Lightweight in-process concurrent programming" +optional = false +python-versions = ">=3.7" +files = [ + {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, + {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, + {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, + {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, + {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, + {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, + {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, + {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, + {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, + {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, + {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, + {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, + {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, + {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, + {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = 
"sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, + {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, +] + +[package.extras] +docs = ["Sphinx", "furo"] +test = ["objgraph", "psutil"] + +[[package]] +name = "gunicorn" +version = "22.0.0" +description = "WSGI HTTP Server for UNIX" +optional = false +python-versions = ">=3.7" +files = [ + {file = "gunicorn-22.0.0-py3-none-any.whl", hash = "sha256:350679f91b24062c86e386e198a15438d53a7a8207235a78ba1b53df4c4378d9"}, + {file = "gunicorn-22.0.0.tar.gz", hash = "sha256:4a0b436239ff76fb33f11c07a16482c521a7e09c1ce3cc293c2330afe01bec63"}, +] + +[package.dependencies] +packaging = "*" + +[package.extras] +eventlet = ["eventlet (>=0.24.1,!=0.36.0)"] +gevent = ["gevent (>=1.4.0)"] +setproctitle = ["setproctitle"] +testing = ["coverage", "eventlet", "gevent", "pytest", "pytest-cov"] +tornado = ["tornado (>=0.2)"] + +[[package]] +name = "idna" +version = "3.7" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, +] + +[[package]] +name = "importlib-metadata" +version = "7.2.1" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-7.2.1-py3-none-any.whl", hash = "sha256:ffef94b0b66046dd8ea2d619b701fe978d9264d38f3998bc4c27ec3b146a87c8"}, + {file = "importlib_metadata-7.2.1.tar.gz", hash = "sha256:509ecb2ab77071db5137c655e24ceb3eee66e7bbc6574165d0d114d9fc4bbe68"}, +] + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] + +[[package]] +name = "importlib-resources" +version = "6.4.2" +description = "Read resources from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_resources-6.4.2-py3-none-any.whl", hash = "sha256:8bba8c54a8a3afaa1419910845fa26ebd706dc716dd208d9b158b4b6966f5c5c"}, + {file = "importlib_resources-6.4.2.tar.gz", hash = "sha256:6cbfbefc449cc6e2095dd184691b7a12a04f40bc75dd4c55d31c34f174cdf57a"}, +] + +[package.dependencies] +zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} + +[package.extras] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +test = ["jaraco.test (>=5.4)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + 
+[[package]] +name = "itsdangerous" +version = "2.2.0" +description = "Safely pass data to untrusted environments and back." +optional = false +python-versions = ">=3.8" +files = [ + {file = "itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef"}, + {file = "itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173"}, +] + +[[package]] +name = "jinja2" +version = "3.1.4" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, + {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "joblib" +version = "1.4.2" +description = "Lightweight pipelining with Python functions" +optional = false +python-versions = ">=3.8" +files = [ + {file = "joblib-1.4.2-py3-none-any.whl", hash = "sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6"}, + {file = "joblib-1.4.2.tar.gz", hash = "sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e"}, +] + +[[package]] +name = "jsonpatch" +version = "1.33" +description = "Apply JSON-Patches (RFC 6902)" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +files = [ + {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"}, + {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"}, +] + +[package.dependencies] +jsonpointer = ">=1.9" + +[[package]] +name = "jsonpointer" +version = "3.0.0" +description = "Identify specific nodes in a JSON document (RFC 6901)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942"}, + {file = "jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef"}, +] + +[[package]] +name = "kiwisolver" +version = "1.4.5" +description = "A fast implementation of the Cassowary constraint solver" +optional = false +python-versions = ">=3.7" +files = [ + {file = "kiwisolver-1.4.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:05703cf211d585109fcd72207a31bb170a0f22144d68298dc5e61b3c946518af"}, + {file = "kiwisolver-1.4.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:146d14bebb7f1dc4d5fbf74f8a6cb15ac42baadee8912eb84ac0b3b2a3dc6ac3"}, + {file = "kiwisolver-1.4.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ef7afcd2d281494c0a9101d5c571970708ad911d028137cd558f02b851c08b4"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9eaa8b117dc8337728e834b9c6e2611f10c79e38f65157c4c38e9400286f5cb1"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ec20916e7b4cbfb1f12380e46486ec4bcbaa91a9c448b97023fde0d5bbf9e4ff"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39b42c68602539407884cf70d6a480a469b93b81b7701378ba5e2328660c847a"}, + {file = 
"kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa12042de0171fad672b6c59df69106d20d5596e4f87b5e8f76df757a7c399aa"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a40773c71d7ccdd3798f6489aaac9eee213d566850a9533f8d26332d626b82c"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:19df6e621f6d8b4b9c4d45f40a66839294ff2bb235e64d2178f7522d9170ac5b"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:83d78376d0d4fd884e2c114d0621624b73d2aba4e2788182d286309ebdeed770"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e391b1f0a8a5a10ab3b9bb6afcfd74f2175f24f8975fb87ecae700d1503cdee0"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:852542f9481f4a62dbb5dd99e8ab7aedfeb8fb6342349a181d4036877410f525"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59edc41b24031bc25108e210c0def6f6c2191210492a972d585a06ff246bb79b"}, + {file = "kiwisolver-1.4.5-cp310-cp310-win32.whl", hash = "sha256:a6aa6315319a052b4ee378aa171959c898a6183f15c1e541821c5c59beaa0238"}, + {file = "kiwisolver-1.4.5-cp310-cp310-win_amd64.whl", hash = "sha256:d0ef46024e6a3d79c01ff13801cb19d0cad7fd859b15037aec74315540acc276"}, + {file = "kiwisolver-1.4.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:11863aa14a51fd6ec28688d76f1735f8f69ab1fabf388851a595d0721af042f5"}, + {file = "kiwisolver-1.4.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8ab3919a9997ab7ef2fbbed0cc99bb28d3c13e6d4b1ad36e97e482558a91be90"}, + {file = "kiwisolver-1.4.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fcc700eadbbccbf6bc1bcb9dbe0786b4b1cb91ca0dcda336eef5c2beed37b797"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dfdd7c0b105af050eb3d64997809dc21da247cf44e63dc73ff0fd20b96be55a9"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76c6a5964640638cdeaa0c359382e5703e9293030fe730018ca06bc2010c4437"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbea0db94288e29afcc4c28afbf3a7ccaf2d7e027489c449cf7e8f83c6346eb9"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ceec1a6bc6cab1d6ff5d06592a91a692f90ec7505d6463a88a52cc0eb58545da"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:040c1aebeda72197ef477a906782b5ab0d387642e93bda547336b8957c61022e"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f91de7223d4c7b793867797bacd1ee53bfe7359bd70d27b7b58a04efbb9436c8"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:faae4860798c31530dd184046a900e652c95513796ef51a12bc086710c2eec4d"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b0157420efcb803e71d1b28e2c287518b8808b7cf1ab8af36718fd0a2c453eb0"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:06f54715b7737c2fecdbf140d1afb11a33d59508a47bf11bb38ecf21dc9ab79f"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fdb7adb641a0d13bdcd4ef48e062363d8a9ad4a182ac7647ec88f695e719ae9f"}, + {file = "kiwisolver-1.4.5-cp311-cp311-win32.whl", hash = 
"sha256:bb86433b1cfe686da83ce32a9d3a8dd308e85c76b60896d58f082136f10bffac"}, + {file = "kiwisolver-1.4.5-cp311-cp311-win_amd64.whl", hash = "sha256:6c08e1312a9cf1074d17b17728d3dfce2a5125b2d791527f33ffbe805200a355"}, + {file = "kiwisolver-1.4.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:32d5cf40c4f7c7b3ca500f8985eb3fb3a7dfc023215e876f207956b5ea26632a"}, + {file = "kiwisolver-1.4.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f846c260f483d1fd217fe5ed7c173fb109efa6b1fc8381c8b7552c5781756192"}, + {file = "kiwisolver-1.4.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5ff5cf3571589b6d13bfbfd6bcd7a3f659e42f96b5fd1c4830c4cf21d4f5ef45"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7269d9e5f1084a653d575c7ec012ff57f0c042258bf5db0954bf551c158466e7"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da802a19d6e15dffe4b0c24b38b3af68e6c1a68e6e1d8f30148c83864f3881db"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3aba7311af82e335dd1e36ffff68aaca609ca6290c2cb6d821a39aa075d8e3ff"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:763773d53f07244148ccac5b084da5adb90bfaee39c197554f01b286cf869228"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2270953c0d8cdab5d422bee7d2007f043473f9d2999631c86a223c9db56cbd16"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d099e745a512f7e3bbe7249ca835f4d357c586d78d79ae8f1dcd4d8adeb9bda9"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:74db36e14a7d1ce0986fa104f7d5637aea5c82ca6326ed0ec5694280942d1162"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e5bab140c309cb3a6ce373a9e71eb7e4873c70c2dda01df6820474f9889d6d4"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0f114aa76dc1b8f636d077979c0ac22e7cd8f3493abbab152f20eb8d3cda71f3"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:88a2df29d4724b9237fc0c6eaf2a1adae0cdc0b3e9f4d8e7dc54b16812d2d81a"}, + {file = "kiwisolver-1.4.5-cp312-cp312-win32.whl", hash = "sha256:72d40b33e834371fd330fb1472ca19d9b8327acb79a5821d4008391db8e29f20"}, + {file = "kiwisolver-1.4.5-cp312-cp312-win_amd64.whl", hash = "sha256:2c5674c4e74d939b9d91dda0fae10597ac7521768fec9e399c70a1f27e2ea2d9"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3a2b053a0ab7a3960c98725cfb0bf5b48ba82f64ec95fe06f1d06c99b552e130"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cd32d6c13807e5c66a7cbb79f90b553642f296ae4518a60d8d76243b0ad2898"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59ec7b7c7e1a61061850d53aaf8e93db63dce0c936db1fda2658b70e4a1be709"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da4cfb373035def307905d05041c1d06d8936452fe89d464743ae7fb8371078b"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2400873bccc260b6ae184b2b8a4fec0e4082d30648eadb7c3d9a13405d861e89"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = 
"sha256:1b04139c4236a0f3aff534479b58f6f849a8b351e1314826c2d230849ed48985"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:4e66e81a5779b65ac21764c295087de82235597a2293d18d943f8e9e32746265"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:7931d8f1f67c4be9ba1dd9c451fb0eeca1a25b89e4d3f89e828fe12a519b782a"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:b3f7e75f3015df442238cca659f8baa5f42ce2a8582727981cbfa15fee0ee205"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:bbf1d63eef84b2e8c89011b7f2235b1e0bf7dacc11cac9431fc6468e99ac77fb"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4c380469bd3f970ef677bf2bcba2b6b0b4d5c75e7a020fb863ef75084efad66f"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-win32.whl", hash = "sha256:9408acf3270c4b6baad483865191e3e582b638b1654a007c62e3efe96f09a9a3"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-win_amd64.whl", hash = "sha256:5b94529f9b2591b7af5f3e0e730a4e0a41ea174af35a4fd067775f9bdfeee01a"}, + {file = "kiwisolver-1.4.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:11c7de8f692fc99816e8ac50d1d1aef4f75126eefc33ac79aac02c099fd3db71"}, + {file = "kiwisolver-1.4.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:53abb58632235cd154176ced1ae8f0d29a6657aa1aa9decf50b899b755bc2b93"}, + {file = "kiwisolver-1.4.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:88b9f257ca61b838b6f8094a62418421f87ac2a1069f7e896c36a7d86b5d4c29"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3195782b26fc03aa9c6913d5bad5aeb864bdc372924c093b0f1cebad603dd712"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc579bf0f502e54926519451b920e875f433aceb4624a3646b3252b5caa9e0b6"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a580c91d686376f0f7c295357595c5a026e6cbc3d77b7c36e290201e7c11ecb"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cfe6ab8da05c01ba6fbea630377b5da2cd9bcbc6338510116b01c1bc939a2c18"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d2e5a98f0ec99beb3c10e13b387f8db39106d53993f498b295f0c914328b1333"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a51a263952b1429e429ff236d2f5a21c5125437861baeed77f5e1cc2d2c7c6da"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3edd2fa14e68c9be82c5b16689e8d63d89fe927e56debd6e1dbce7a26a17f81b"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:74d1b44c6cfc897df648cc9fdaa09bc3e7679926e6f96df05775d4fb3946571c"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:76d9289ed3f7501012e05abb8358bbb129149dbd173f1f57a1bf1c22d19ab7cc"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:92dea1ffe3714fa8eb6a314d2b3c773208d865a0e0d35e713ec54eea08a66250"}, + {file = "kiwisolver-1.4.5-cp38-cp38-win32.whl", hash = "sha256:5c90ae8c8d32e472be041e76f9d2f2dbff4d0b0be8bd4041770eddb18cf49a4e"}, + {file = "kiwisolver-1.4.5-cp38-cp38-win_amd64.whl", hash = "sha256:c7940c1dc63eb37a67721b10d703247552416f719c4188c54e04334321351ced"}, + {file = "kiwisolver-1.4.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9407b6a5f0d675e8a827ad8742e1d6b49d9c1a1da5d952a67d50ef5f4170b18d"}, + {file = 
"kiwisolver-1.4.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:15568384086b6df3c65353820a4473575dbad192e35010f622c6ce3eebd57af9"}, + {file = "kiwisolver-1.4.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0dc9db8e79f0036e8173c466d21ef18e1befc02de8bf8aa8dc0813a6dc8a7046"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:cdc8a402aaee9a798b50d8b827d7ecf75edc5fb35ea0f91f213ff927c15f4ff0"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6c3bd3cde54cafb87d74d8db50b909705c62b17c2099b8f2e25b461882e544ff"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:955e8513d07a283056b1396e9a57ceddbd272d9252c14f154d450d227606eb54"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:346f5343b9e3f00b8db8ba359350eb124b98c99efd0b408728ac6ebf38173958"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9098e0049e88c6a24ff64545cdfc50807818ba6c1b739cae221bbbcbc58aad3"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:00bd361b903dc4bbf4eb165f24d1acbee754fce22ded24c3d56eec268658a5cf"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7b8b454bac16428b22560d0a1cf0a09875339cab69df61d7805bf48919415901"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f1d072c2eb0ad60d4c183f3fb44ac6f73fb7a8f16a2694a91f988275cbf352f9"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:31a82d498054cac9f6d0b53d02bb85811185bcb477d4b60144f915f3b3126342"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6512cb89e334e4700febbffaaa52761b65b4f5a3cf33f960213d5656cea36a77"}, + {file = "kiwisolver-1.4.5-cp39-cp39-win32.whl", hash = "sha256:9db8ea4c388fdb0f780fe91346fd438657ea602d58348753d9fb265ce1bca67f"}, + {file = "kiwisolver-1.4.5-cp39-cp39-win_amd64.whl", hash = "sha256:59415f46a37f7f2efeec758353dd2eae1b07640d8ca0f0c42548ec4125492635"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5c7b3b3a728dc6faf3fc372ef24f21d1e3cee2ac3e9596691d746e5a536de920"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:620ced262a86244e2be10a676b646f29c34537d0d9cc8eb26c08f53d98013390"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:378a214a1e3bbf5ac4a8708304318b4f890da88c9e6a07699c4ae7174c09a68d"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf7be1207676ac608a50cd08f102f6742dbfc70e8d60c4db1c6897f62f71523"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ba55dce0a9b8ff59495ddd050a0225d58bd0983d09f87cfe2b6aec4f2c1234e4"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fd32ea360bcbb92d28933fc05ed09bffcb1704ba3fc7942e81db0fd4f81a7892"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5e7139af55d1688f8b960ee9ad5adafc4ac17c1c473fe07133ac092310d76544"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dced8146011d2bc2e883f9bd68618b8247387f4bbec46d7392b3c3b032640126"}, + {file = 
"kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9bf3325c47b11b2e51bca0824ea217c7cd84491d8ac4eefd1e409705ef092bd"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5794cf59533bc3f1b1c821f7206a3617999db9fbefc345360aafe2e067514929"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e368f200bbc2e4f905b8e71eb38b3c04333bddaa6a2464a6355487b02bb7fb09"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5d706eba36b4c4d5bc6c6377bb6568098765e990cfc21ee16d13963fab7b3e7"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85267bd1aa8880a9c88a8cb71e18d3d64d2751a790e6ca6c27b8ccc724bcd5ad"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:210ef2c3a1f03272649aff1ef992df2e724748918c4bc2d5a90352849eb40bea"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:11d011a7574eb3b82bcc9c1a1d35c1d7075677fdd15de527d91b46bd35e935ee"}, + {file = "kiwisolver-1.4.5.tar.gz", hash = "sha256:e57e563a57fb22a142da34f38acc2fc1a5c864bc29ca1517a88abc963e60d6ec"}, +] + +[[package]] +name = "langchain-core" +version = "0.2.30" +description = "Building applications with LLMs through composability" +optional = false +python-versions = ">=3.8.1,<4.0" +files = [] +develop = true + +[package.dependencies] +jsonpatch = "^1.33" +langsmith = "^0.1.75" +packaging = ">=23.2,<25" +pydantic = {version = ">=1,<3", markers = "python_full_version < \"3.12.4\""} +PyYAML = ">=5.3" +tenacity = "^8.1.0,!=8.4.0" +typing-extensions = ">=4.7" + +[package.source] +type = "directory" +url = "../../core" + +[[package]] +name = "langsmith" +version = "0.1.99" +description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "langsmith-0.1.99-py3-none-any.whl", hash = "sha256:ef8d1d74a2674c514aa429b0171a9fbb661207dc3835142cca0e8f1bf97b26b0"}, + {file = "langsmith-0.1.99.tar.gz", hash = "sha256:b5c6a1f158abda61600a4a445081ee848b4a28b758d91f2793dc02aeffafcaf1"}, +] + +[package.dependencies] +orjson = ">=3.9.14,<4.0.0" +pydantic = {version = ">=1,<3", markers = "python_full_version < \"3.12.4\""} +requests = ">=2,<3" + +[[package]] +name = "mako" +version = "1.3.5" +description = "A super-fast templating language that borrows the best ideas from the existing templating languages." +optional = false +python-versions = ">=3.8" +files = [ + {file = "Mako-1.3.5-py3-none-any.whl", hash = "sha256:260f1dbc3a519453a9c856dedfe4beb4e50bd5a26d96386cb6c80856556bb91a"}, + {file = "Mako-1.3.5.tar.gz", hash = "sha256:48dbc20568c1d276a2698b36d968fa76161bf127194907ea6fc594fa81f943bc"}, +] + +[package.dependencies] +MarkupSafe = ">=0.9.2" + +[package.extras] +babel = ["Babel"] +lingua = ["lingua"] +testing = ["pytest"] + +[[package]] +name = "markdown" +version = "3.6" +description = "Python implementation of John Gruber's Markdown." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "Markdown-3.6-py3-none-any.whl", hash = "sha256:48f276f4d8cfb8ce6527c8f79e2ee29708508bf4d40aa410fbc3b4ee832c850f"}, + {file = "Markdown-3.6.tar.gz", hash = "sha256:ed4f41f6daecbeeb96e576ce414c41d2d876daa9a16cb35fa8ed8c2ddfad0224"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.5)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"] +testing = ["coverage", "pyyaml"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = 
"MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file 
= "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "matplotlib" +version = "3.7.5" +description = "Python plotting package" +optional = false +python-versions = ">=3.8" +files = [ + {file = "matplotlib-3.7.5-cp310-cp310-macosx_10_12_universal2.whl", hash = "sha256:4a87b69cb1cb20943010f63feb0b2901c17a3b435f75349fd9865713bfa63925"}, + {file = "matplotlib-3.7.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:d3ce45010fefb028359accebb852ca0c21bd77ec0f281952831d235228f15810"}, + {file = 
"matplotlib-3.7.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fbea1e762b28400393d71be1a02144aa16692a3c4c676ba0178ce83fc2928fdd"}, + {file = "matplotlib-3.7.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec0e1adc0ad70ba8227e957551e25a9d2995e319c29f94a97575bb90fa1d4469"}, + {file = "matplotlib-3.7.5-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6738c89a635ced486c8a20e20111d33f6398a9cbebce1ced59c211e12cd61455"}, + {file = "matplotlib-3.7.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1210b7919b4ed94b5573870f316bca26de3e3b07ffdb563e79327dc0e6bba515"}, + {file = "matplotlib-3.7.5-cp310-cp310-win32.whl", hash = "sha256:068ebcc59c072781d9dcdb82f0d3f1458271c2de7ca9c78f5bd672141091e9e1"}, + {file = "matplotlib-3.7.5-cp310-cp310-win_amd64.whl", hash = "sha256:f098ffbaab9df1e3ef04e5a5586a1e6b1791380698e84938d8640961c79b1fc0"}, + {file = "matplotlib-3.7.5-cp311-cp311-macosx_10_12_universal2.whl", hash = "sha256:f65342c147572673f02a4abec2d5a23ad9c3898167df9b47c149f32ce61ca078"}, + {file = "matplotlib-3.7.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4ddf7fc0e0dc553891a117aa083039088d8a07686d4c93fb8a810adca68810af"}, + {file = "matplotlib-3.7.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0ccb830fc29442360d91be48527809f23a5dcaee8da5f4d9b2d5b867c1b087b8"}, + {file = "matplotlib-3.7.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:efc6bb28178e844d1f408dd4d6341ee8a2e906fc9e0fa3dae497da4e0cab775d"}, + {file = "matplotlib-3.7.5-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b15c4c2d374f249f324f46e883340d494c01768dd5287f8bc00b65b625ab56c"}, + {file = "matplotlib-3.7.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d028555421912307845e59e3de328260b26d055c5dac9b182cc9783854e98fb"}, + {file = "matplotlib-3.7.5-cp311-cp311-win32.whl", hash = "sha256:fe184b4625b4052fa88ef350b815559dd90cc6cc8e97b62f966e1ca84074aafa"}, + {file = "matplotlib-3.7.5-cp311-cp311-win_amd64.whl", hash = "sha256:084f1f0f2f1010868c6f1f50b4e1c6f2fb201c58475494f1e5b66fed66093647"}, + {file = "matplotlib-3.7.5-cp312-cp312-macosx_10_12_universal2.whl", hash = "sha256:34bceb9d8ddb142055ff27cd7135f539f2f01be2ce0bafbace4117abe58f8fe4"}, + {file = "matplotlib-3.7.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:c5a2134162273eb8cdfd320ae907bf84d171de948e62180fa372a3ca7cf0f433"}, + {file = "matplotlib-3.7.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:039ad54683a814002ff37bf7981aa1faa40b91f4ff84149beb53d1eb64617980"}, + {file = "matplotlib-3.7.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d742ccd1b09e863b4ca58291728db645b51dab343eebb08d5d4b31b308296ce"}, + {file = "matplotlib-3.7.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:743b1c488ca6a2bc7f56079d282e44d236bf375968bfd1b7ba701fd4d0fa32d6"}, + {file = "matplotlib-3.7.5-cp312-cp312-win_amd64.whl", hash = "sha256:fbf730fca3e1f23713bc1fae0a57db386e39dc81ea57dc305c67f628c1d7a342"}, + {file = "matplotlib-3.7.5-cp38-cp38-macosx_10_12_universal2.whl", hash = "sha256:cfff9b838531698ee40e40ea1a8a9dc2c01edb400b27d38de6ba44c1f9a8e3d2"}, + {file = "matplotlib-3.7.5-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:1dbcca4508bca7847fe2d64a05b237a3dcaec1f959aedb756d5b1c67b770c5ee"}, + {file = "matplotlib-3.7.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4cdf4ef46c2a1609a50411b66940b31778db1e4b73d4ecc2eaa40bd588979b13"}, + {file = 
"matplotlib-3.7.5-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:167200ccfefd1674b60e957186dfd9baf58b324562ad1a28e5d0a6b3bea77905"}, + {file = "matplotlib-3.7.5-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:53e64522934df6e1818b25fd48cf3b645b11740d78e6ef765fbb5fa5ce080d02"}, + {file = "matplotlib-3.7.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3e3bc79b2d7d615067bd010caff9243ead1fc95cf735c16e4b2583173f717eb"}, + {file = "matplotlib-3.7.5-cp38-cp38-win32.whl", hash = "sha256:6b641b48c6819726ed47c55835cdd330e53747d4efff574109fd79b2d8a13748"}, + {file = "matplotlib-3.7.5-cp38-cp38-win_amd64.whl", hash = "sha256:f0b60993ed3488b4532ec6b697059897891927cbfc2b8d458a891b60ec03d9d7"}, + {file = "matplotlib-3.7.5-cp39-cp39-macosx_10_12_universal2.whl", hash = "sha256:090964d0afaff9c90e4d8de7836757e72ecfb252fb02884016d809239f715651"}, + {file = "matplotlib-3.7.5-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:9fc6fcfbc55cd719bc0bfa60bde248eb68cf43876d4c22864603bdd23962ba25"}, + {file = "matplotlib-3.7.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e7cc3078b019bb863752b8b60e8b269423000f1603cb2299608231996bd9d54"}, + {file = "matplotlib-3.7.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e4e9a868e8163abaaa8259842d85f949a919e1ead17644fb77a60427c90473c"}, + {file = "matplotlib-3.7.5-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fa7ebc995a7d747dacf0a717d0eb3aa0f0c6a0e9ea88b0194d3a3cd241a1500f"}, + {file = "matplotlib-3.7.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3785bfd83b05fc0e0c2ae4c4a90034fe693ef96c679634756c50fe6efcc09856"}, + {file = "matplotlib-3.7.5-cp39-cp39-win32.whl", hash = "sha256:29b058738c104d0ca8806395f1c9089dfe4d4f0f78ea765c6c704469f3fffc81"}, + {file = "matplotlib-3.7.5-cp39-cp39-win_amd64.whl", hash = "sha256:fd4028d570fa4b31b7b165d4a685942ae9cdc669f33741e388c01857d9723eab"}, + {file = "matplotlib-3.7.5-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2a9a3f4d6a7f88a62a6a18c7e6a84aedcaf4faf0708b4ca46d87b19f1b526f88"}, + {file = "matplotlib-3.7.5-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b9b3fd853d4a7f008a938df909b96db0b454225f935d3917520305b90680579c"}, + {file = "matplotlib-3.7.5-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0ad550da9f160737d7890217c5eeed4337d07e83ca1b2ca6535078f354e7675"}, + {file = "matplotlib-3.7.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:20da7924a08306a861b3f2d1da0d1aa9a6678e480cf8eacffe18b565af2813e7"}, + {file = "matplotlib-3.7.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b45c9798ea6bb920cb77eb7306409756a7fab9db9b463e462618e0559aecb30e"}, + {file = "matplotlib-3.7.5-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a99866267da1e561c7776fe12bf4442174b79aac1a47bd7e627c7e4d077ebd83"}, + {file = "matplotlib-3.7.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b6aa62adb6c268fc87d80f963aca39c64615c31830b02697743c95590ce3fbb"}, + {file = "matplotlib-3.7.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e530ab6a0afd082d2e9c17eb1eb064a63c5b09bb607b2b74fa41adbe3e162286"}, + {file = "matplotlib-3.7.5.tar.gz", hash = "sha256:1e5c971558ebc811aa07f54c7b7c677d78aa518ef4c390e14673a09e0860184a"}, +] + +[package.dependencies] +contourpy = ">=1.0.1" +cycler = ">=0.10" +fonttools = ">=4.22.0" +importlib-resources = {version = ">=3.2.0", markers = 
"python_version < \"3.10\""} +kiwisolver = ">=1.0.1" +numpy = ">=1.20,<2" +packaging = ">=20.0" +pillow = ">=6.2.0" +pyparsing = ">=2.3.1" +python-dateutil = ">=2.7" + +[[package]] +name = "mlflow" +version = "2.15.1" +description = "MLflow is an open source platform for the complete machine learning lifecycle" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mlflow-2.15.1-py3-none-any.whl", hash = "sha256:f998b8ec9df9199284f52e79ea5dd0b2b76b327ed7f060531e44f1ecd197c5d9"}, + {file = "mlflow-2.15.1.tar.gz", hash = "sha256:88da13f547cedce992d4614a4547f44bcdaf369e893179dd9a8bfa60338011bf"}, +] + +[package.dependencies] +alembic = "<1.10.0 || >1.10.0,<2" +docker = ">=4.0.0,<8" +Flask = "<4" +graphene = "<4" +gunicorn = {version = "<23", markers = "platform_system != \"Windows\""} +Jinja2 = [ + {version = ">=2.11,<4", markers = "platform_system != \"Windows\""}, + {version = ">=3.0,<4", markers = "platform_system == \"Windows\""}, +] +markdown = ">=3.3,<4" +matplotlib = "<4" +mlflow-skinny = "2.15.1" +numpy = "<2" +pandas = "<3" +pyarrow = ">=4.0.0,<16" +querystring-parser = "<2" +scikit-learn = "<2" +scipy = "<2" +sqlalchemy = ">=1.4.0,<3" +waitress = {version = "<4", markers = "platform_system == \"Windows\""} + +[package.extras] +aliyun-oss = ["aliyunstoreplugin"] +databricks = ["azure-storage-file-datalake (>12)", "boto3 (>1)", "botocore", "google-cloud-storage (>=1.30.0)"] +extras = ["azureml-core (>=1.2.0)", "boto3", "botocore", "google-cloud-storage (>=1.30.0)", "kubernetes", "mlserver (>=1.2.0,!=1.3.1,<1.4.0)", "mlserver-mlflow (>=1.2.0,!=1.3.1,<1.4.0)", "prometheus-flask-exporter", "pyarrow", "pysftp", "requests-auth-aws-sigv4", "virtualenv"] +gateway = ["aiohttp (<4)", "boto3 (>=1.28.56,<2)", "fastapi (<1)", "pydantic (>=1.0,<3)", "slowapi (>=0.1.9,<1)", "tiktoken (<1)", "uvicorn[standard] (<1)", "watchfiles (<1)"] +genai = ["aiohttp (<4)", "boto3 (>=1.28.56,<2)", "fastapi (<1)", "pydantic (>=1.0,<3)", "slowapi (>=0.1.9,<1)", "tiktoken (<1)", "uvicorn[standard] (<1)", "watchfiles (<1)"] +jfrog = ["mlflow-jfrog-plugin"] +langchain = ["langchain (>=0.1.0,<=0.2.11)"] +sqlserver = ["mlflow-dbstore"] +xethub = ["mlflow-xethub"] + +[[package]] +name = "mlflow-skinny" +version = "2.15.1" +description = "MLflow is an open source platform for the complete machine learning lifecycle" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mlflow_skinny-2.15.1-py3-none-any.whl", hash = "sha256:a48c6f56106b104dc7221bad91af75a150b927d15210a41928cc8ecba086470a"}, + {file = "mlflow_skinny-2.15.1.tar.gz", hash = "sha256:302f49757ffc8bdfc517b06f5252a02634203fec5e5ce95ad876a36af8403907"}, +] + +[package.dependencies] +cachetools = ">=5.0.0,<6" +click = ">=7.0,<9" +cloudpickle = "<4" +databricks-sdk = ">=0.20.0,<1" +entrypoints = "<1" +gitpython = ">=3.1.9,<4" +importlib-metadata = ">=3.7.0,<4.7.0 || >4.7.0,<8" +opentelemetry-api = ">=1.9.0,<3" +opentelemetry-sdk = ">=1.9.0,<3" +packaging = "<25" +protobuf = ">=3.12.0,<6" +pytz = "<2025" +pyyaml = ">=5.1,<7" +requests = ">=2.17.3,<3" +sqlparse = ">=0.4.0,<1" + +[package.extras] +aliyun-oss = ["aliyunstoreplugin"] +databricks = ["azure-storage-file-datalake (>12)", "boto3 (>1)", "botocore", "google-cloud-storage (>=1.30.0)"] +extras = ["azureml-core (>=1.2.0)", "boto3", "botocore", "google-cloud-storage (>=1.30.0)", "kubernetes", "mlserver (>=1.2.0,!=1.3.1,<1.4.0)", "mlserver-mlflow (>=1.2.0,!=1.3.1,<1.4.0)", "prometheus-flask-exporter", "pyarrow", "pysftp", "requests-auth-aws-sigv4", "virtualenv"] +gateway = ["aiohttp (<4)", 
"boto3 (>=1.28.56,<2)", "fastapi (<1)", "pydantic (>=1.0,<3)", "slowapi (>=0.1.9,<1)", "tiktoken (<1)", "uvicorn[standard] (<1)", "watchfiles (<1)"] +genai = ["aiohttp (<4)", "boto3 (>=1.28.56,<2)", "fastapi (<1)", "pydantic (>=1.0,<3)", "slowapi (>=0.1.9,<1)", "tiktoken (<1)", "uvicorn[standard] (<1)", "watchfiles (<1)"] +jfrog = ["mlflow-jfrog-plugin"] +langchain = ["langchain (>=0.1.0,<=0.2.11)"] +sqlserver = ["mlflow-dbstore"] +xethub = ["mlflow-xethub"] + +[[package]] +name = "mypy" +version = "1.11.1" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-1.11.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a32fc80b63de4b5b3e65f4be82b4cfa362a46702672aa6a0f443b4689af7008c"}, + {file = "mypy-1.11.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c1952f5ea8a5a959b05ed5f16452fddadbaae48b5d39235ab4c3fc444d5fd411"}, + {file = "mypy-1.11.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1e30dc3bfa4e157e53c1d17a0dad20f89dc433393e7702b813c10e200843b03"}, + {file = "mypy-1.11.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2c63350af88f43a66d3dfeeeb8d77af34a4f07d760b9eb3a8697f0386c7590b4"}, + {file = "mypy-1.11.1-cp310-cp310-win_amd64.whl", hash = "sha256:a831671bad47186603872a3abc19634f3011d7f83b083762c942442d51c58d58"}, + {file = "mypy-1.11.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7b6343d338390bb946d449677726edf60102a1c96079b4f002dedff375953fc5"}, + {file = "mypy-1.11.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e4fe9f4e5e521b458d8feb52547f4bade7ef8c93238dfb5bbc790d9ff2d770ca"}, + {file = "mypy-1.11.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:886c9dbecc87b9516eff294541bf7f3655722bf22bb898ee06985cd7269898de"}, + {file = "mypy-1.11.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fca4a60e1dd9fd0193ae0067eaeeb962f2d79e0d9f0f66223a0682f26ffcc809"}, + {file = "mypy-1.11.1-cp311-cp311-win_amd64.whl", hash = "sha256:0bd53faf56de9643336aeea1c925012837432b5faf1701ccca7fde70166ccf72"}, + {file = "mypy-1.11.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f39918a50f74dc5969807dcfaecafa804fa7f90c9d60506835036cc1bc891dc8"}, + {file = "mypy-1.11.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0bc71d1fb27a428139dd78621953effe0d208aed9857cb08d002280b0422003a"}, + {file = "mypy-1.11.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b868d3bcff720dd7217c383474008ddabaf048fad8d78ed948bb4b624870a417"}, + {file = "mypy-1.11.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a707ec1527ffcdd1c784d0924bf5cb15cd7f22683b919668a04d2b9c34549d2e"}, + {file = "mypy-1.11.1-cp312-cp312-win_amd64.whl", hash = "sha256:64f4a90e3ea07f590c5bcf9029035cf0efeae5ba8be511a8caada1a4893f5525"}, + {file = "mypy-1.11.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:749fd3213916f1751fff995fccf20c6195cae941dc968f3aaadf9bb4e430e5a2"}, + {file = "mypy-1.11.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b639dce63a0b19085213ec5fdd8cffd1d81988f47a2dec7100e93564f3e8fb3b"}, + {file = "mypy-1.11.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c956b49c5d865394d62941b109728c5c596a415e9c5b2be663dd26a1ff07bc0"}, + {file = "mypy-1.11.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45df906e8b6804ef4b666af29a87ad9f5921aad091c79cc38e12198e220beabd"}, + {file = "mypy-1.11.1-cp38-cp38-win_amd64.whl", hash = 
"sha256:d44be7551689d9d47b7abc27c71257adfdb53f03880841a5db15ddb22dc63edb"}, + {file = "mypy-1.11.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2684d3f693073ab89d76da8e3921883019ea8a3ec20fa5d8ecca6a2db4c54bbe"}, + {file = "mypy-1.11.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:79c07eb282cb457473add5052b63925e5cc97dfab9812ee65a7c7ab5e3cb551c"}, + {file = "mypy-1.11.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11965c2f571ded6239977b14deebd3f4c3abd9a92398712d6da3a772974fad69"}, + {file = "mypy-1.11.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a2b43895a0f8154df6519706d9bca8280cda52d3d9d1514b2d9c3e26792a0b74"}, + {file = "mypy-1.11.1-cp39-cp39-win_amd64.whl", hash = "sha256:1a81cf05975fd61aec5ae16501a091cfb9f605dc3e3c878c0da32f250b74760b"}, + {file = "mypy-1.11.1-py3-none-any.whl", hash = "sha256:0624bdb940255d2dd24e829d99a13cfeb72e4e9031f9492148f410ed30bcab54"}, + {file = "mypy-1.11.1.tar.gz", hash = "sha256:f404a0b069709f18bbdb702eb3dcfe51910602995de00bd39cea3050b5772d08"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=4.6.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "numpy" +version = "1.24.4" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "numpy-1.24.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0bfb52d2169d58c1cdb8cc1f16989101639b34c7d3ce60ed70b19c63eba0b64"}, + {file = "numpy-1.24.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ed094d4f0c177b1b8e7aa9cba7d6ceed51c0e569a5318ac0ca9a090680a6a1b1"}, + {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79fc682a374c4a8ed08b331bef9c5f582585d1048fa6d80bc6c35bc384eee9b4"}, + {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ffe43c74893dbf38c2b0a1f5428760a1a9c98285553c89e12d70a96a7f3a4d6"}, + {file = "numpy-1.24.4-cp310-cp310-win32.whl", hash = "sha256:4c21decb6ea94057331e111a5bed9a79d335658c27ce2adb580fb4d54f2ad9bc"}, + {file = "numpy-1.24.4-cp310-cp310-win_amd64.whl", hash = "sha256:b4bea75e47d9586d31e892a7401f76e909712a0fd510f58f5337bea9572c571e"}, + {file = "numpy-1.24.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f136bab9c2cfd8da131132c2cf6cc27331dd6fae65f95f69dcd4ae3c3639c810"}, + {file = "numpy-1.24.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2926dac25b313635e4d6cf4dc4e51c8c0ebfed60b801c799ffc4c32bf3d1254"}, + {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:222e40d0e2548690405b0b3c7b21d1169117391c2e82c378467ef9ab4c8f0da7"}, + {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7215847ce88a85ce39baf9e89070cb860c98fdddacbaa6c0da3ffb31b3350bd5"}, + {file = "numpy-1.24.4-cp311-cp311-win32.whl", hash = 
"sha256:4979217d7de511a8d57f4b4b5b2b965f707768440c17cb70fbf254c4b225238d"}, + {file = "numpy-1.24.4-cp311-cp311-win_amd64.whl", hash = "sha256:b7b1fc9864d7d39e28f41d089bfd6353cb5f27ecd9905348c24187a768c79694"}, + {file = "numpy-1.24.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1452241c290f3e2a312c137a9999cdbf63f78864d63c79039bda65ee86943f61"}, + {file = "numpy-1.24.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:04640dab83f7c6c85abf9cd729c5b65f1ebd0ccf9de90b270cd61935eef0197f"}, + {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5425b114831d1e77e4b5d812b69d11d962e104095a5b9c3b641a218abcc050e"}, + {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd80e219fd4c71fc3699fc1dadac5dcf4fd882bfc6f7ec53d30fa197b8ee22dc"}, + {file = "numpy-1.24.4-cp38-cp38-win32.whl", hash = "sha256:4602244f345453db537be5314d3983dbf5834a9701b7723ec28923e2889e0bb2"}, + {file = "numpy-1.24.4-cp38-cp38-win_amd64.whl", hash = "sha256:692f2e0f55794943c5bfff12b3f56f99af76f902fc47487bdfe97856de51a706"}, + {file = "numpy-1.24.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2541312fbf09977f3b3ad449c4e5f4bb55d0dbf79226d7724211acc905049400"}, + {file = "numpy-1.24.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9667575fb6d13c95f1b36aca12c5ee3356bf001b714fc354eb5465ce1609e62f"}, + {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3a86ed21e4f87050382c7bc96571755193c4c1392490744ac73d660e8f564a9"}, + {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d11efb4dbecbdf22508d55e48d9c8384db795e1b7b51ea735289ff96613ff74d"}, + {file = "numpy-1.24.4-cp39-cp39-win32.whl", hash = "sha256:6620c0acd41dbcb368610bb2f4d83145674040025e5536954782467100aa8835"}, + {file = "numpy-1.24.4-cp39-cp39-win_amd64.whl", hash = "sha256:befe2bf740fd8373cf56149a5c23a0f601e82869598d41f8e188a0e9869926f8"}, + {file = "numpy-1.24.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:31f13e25b4e304632a4619d0e0777662c2ffea99fcae2029556b17d8ff958aef"}, + {file = "numpy-1.24.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95f7ac6540e95bc440ad77f56e520da5bf877f87dca58bd095288dce8940532a"}, + {file = "numpy-1.24.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e98f220aa76ca2a977fe435f5b04d7b3470c0a2e6312907b37ba6068f26787f2"}, + {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"}, +] + +[[package]] +name = "opentelemetry-api" +version = "1.26.0" +description = "OpenTelemetry Python API" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_api-1.26.0-py3-none-any.whl", hash = "sha256:7d7ea33adf2ceda2dd680b18b1677e4152000b37ca76e679da71ff103b943064"}, + {file = "opentelemetry_api-1.26.0.tar.gz", hash = "sha256:2bd639e4bed5b18486fef0b5a520aaffde5a18fc225e808a1ac4df363f43a1ce"}, +] + +[package.dependencies] +deprecated = ">=1.2.6" +importlib-metadata = ">=6.0,<=8.0.0" + +[[package]] +name = "opentelemetry-sdk" +version = "1.26.0" +description = "OpenTelemetry Python SDK" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_sdk-1.26.0-py3-none-any.whl", hash = "sha256:feb5056a84a88670c041ea0ded9921fca559efec03905dddeb3885525e0af897"}, + {file = "opentelemetry_sdk-1.26.0.tar.gz", hash = "sha256:c90d2868f8805619535c05562d699e2f4fb1f00dbd55a86dcefca4da6fa02f85"}, +] + +[package.dependencies] +opentelemetry-api = "1.26.0" 
+opentelemetry-semantic-conventions = "0.47b0" +typing-extensions = ">=3.7.4" + +[[package]] +name = "opentelemetry-semantic-conventions" +version = "0.47b0" +description = "OpenTelemetry Semantic Conventions" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_semantic_conventions-0.47b0-py3-none-any.whl", hash = "sha256:4ff9d595b85a59c1c1413f02bba320ce7ea6bf9e2ead2b0913c4395c7bbc1063"}, + {file = "opentelemetry_semantic_conventions-0.47b0.tar.gz", hash = "sha256:a8d57999bbe3495ffd4d510de26a97dadc1dace53e0275001b2c1b2f67992a7e"}, +] + +[package.dependencies] +deprecated = ">=1.2.6" +opentelemetry-api = "1.26.0" + +[[package]] +name = "orjson" +version = "3.10.7" +description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" +optional = false +python-versions = ">=3.8" +files = [ + {file = "orjson-3.10.7-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:74f4544f5a6405b90da8ea724d15ac9c36da4d72a738c64685003337401f5c12"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34a566f22c28222b08875b18b0dfbf8a947e69df21a9ed5c51a6bf91cfb944ac"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bf6ba8ebc8ef5792e2337fb0419f8009729335bb400ece005606336b7fd7bab7"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac7cf6222b29fbda9e3a472b41e6a5538b48f2c8f99261eecd60aafbdb60690c"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de817e2f5fc75a9e7dd350c4b0f54617b280e26d1631811a43e7e968fa71e3e9"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:348bdd16b32556cf8d7257b17cf2bdb7ab7976af4af41ebe79f9796c218f7e91"}, + {file = "orjson-3.10.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:479fd0844ddc3ca77e0fd99644c7fe2de8e8be1efcd57705b5c92e5186e8a250"}, + {file = "orjson-3.10.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fdf5197a21dd660cf19dfd2a3ce79574588f8f5e2dbf21bda9ee2d2b46924d84"}, + {file = "orjson-3.10.7-cp310-none-win32.whl", hash = "sha256:d374d36726746c81a49f3ff8daa2898dccab6596864ebe43d50733275c629175"}, + {file = "orjson-3.10.7-cp310-none-win_amd64.whl", hash = "sha256:cb61938aec8b0ffb6eef484d480188a1777e67b05d58e41b435c74b9d84e0b9c"}, + {file = "orjson-3.10.7-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:7db8539039698ddfb9a524b4dd19508256107568cdad24f3682d5773e60504a2"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:480f455222cb7a1dea35c57a67578848537d2602b46c464472c995297117fa09"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8a9c9b168b3a19e37fe2778c0003359f07822c90fdff8f98d9d2a91b3144d8e0"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8de062de550f63185e4c1c54151bdddfc5625e37daf0aa1e75d2a1293e3b7d9a"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6b0dd04483499d1de9c8f6203f8975caf17a6000b9c0c54630cef02e44ee624e"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b58d3795dafa334fc8fd46f7c5dc013e6ad06fd5b9a4cc98cb1456e7d3558bd6"}, + {file = "orjson-3.10.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash 
= "sha256:33cfb96c24034a878d83d1a9415799a73dc77480e6c40417e5dda0710d559ee6"}, + {file = "orjson-3.10.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e724cebe1fadc2b23c6f7415bad5ee6239e00a69f30ee423f319c6af70e2a5c0"}, + {file = "orjson-3.10.7-cp311-none-win32.whl", hash = "sha256:82763b46053727a7168d29c772ed5c870fdae2f61aa8a25994c7984a19b1021f"}, + {file = "orjson-3.10.7-cp311-none-win_amd64.whl", hash = "sha256:eb8d384a24778abf29afb8e41d68fdd9a156cf6e5390c04cc07bbc24b89e98b5"}, + {file = "orjson-3.10.7-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:44a96f2d4c3af51bfac6bc4ef7b182aa33f2f054fd7f34cc0ee9a320d051d41f"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76ac14cd57df0572453543f8f2575e2d01ae9e790c21f57627803f5e79b0d3c3"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bdbb61dcc365dd9be94e8f7df91975edc9364d6a78c8f7adb69c1cdff318ec93"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b48b3db6bb6e0a08fa8c83b47bc169623f801e5cc4f24442ab2b6617da3b5313"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:23820a1563a1d386414fef15c249040042b8e5d07b40ab3fe3efbfbbcbcb8864"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0c6a008e91d10a2564edbb6ee5069a9e66df3fbe11c9a005cb411f441fd2c09"}, + {file = "orjson-3.10.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d352ee8ac1926d6193f602cbe36b1643bbd1bbcb25e3c1a657a4390f3000c9a5"}, + {file = "orjson-3.10.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d2d9f990623f15c0ae7ac608103c33dfe1486d2ed974ac3f40b693bad1a22a7b"}, + {file = "orjson-3.10.7-cp312-none-win32.whl", hash = "sha256:7c4c17f8157bd520cdb7195f75ddbd31671997cbe10aee559c2d613592e7d7eb"}, + {file = "orjson-3.10.7-cp312-none-win_amd64.whl", hash = "sha256:1d9c0e733e02ada3ed6098a10a8ee0052dd55774de3d9110d29868d24b17faa1"}, + {file = "orjson-3.10.7-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:77d325ed866876c0fa6492598ec01fe30e803272a6e8b10e992288b009cbe149"}, + {file = "orjson-3.10.7-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ea2c232deedcb605e853ae1db2cc94f7390ac776743b699b50b071b02bea6fe"}, + {file = "orjson-3.10.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3dcfbede6737fdbef3ce9c37af3fb6142e8e1ebc10336daa05872bfb1d87839c"}, + {file = "orjson-3.10.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:11748c135f281203f4ee695b7f80bb1358a82a63905f9f0b794769483ea854ad"}, + {file = "orjson-3.10.7-cp313-none-win32.whl", hash = "sha256:a7e19150d215c7a13f39eb787d84db274298d3f83d85463e61d277bbd7f401d2"}, + {file = "orjson-3.10.7-cp313-none-win_amd64.whl", hash = "sha256:eef44224729e9525d5261cc8d28d6b11cafc90e6bd0be2157bde69a52ec83024"}, + {file = "orjson-3.10.7-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:6ea2b2258eff652c82652d5e0f02bd5e0463a6a52abb78e49ac288827aaa1469"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:430ee4d85841e1483d487e7b81401785a5dfd69db5de01314538f31f8fbf7ee1"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4b6146e439af4c2472c56f8540d799a67a81226e11992008cb47e1267a9b3225"}, + {file = 
"orjson-3.10.7-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:084e537806b458911137f76097e53ce7bf5806dda33ddf6aaa66a028f8d43a23"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4829cf2195838e3f93b70fd3b4292156fc5e097aac3739859ac0dcc722b27ac0"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1193b2416cbad1a769f868b1749535d5da47626ac29445803dae7cc64b3f5c98"}, + {file = "orjson-3.10.7-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:4e6c3da13e5a57e4b3dca2de059f243ebec705857522f188f0180ae88badd354"}, + {file = "orjson-3.10.7-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c31008598424dfbe52ce8c5b47e0752dca918a4fdc4a2a32004efd9fab41d866"}, + {file = "orjson-3.10.7-cp38-none-win32.whl", hash = "sha256:7122a99831f9e7fe977dc45784d3b2edc821c172d545e6420c375e5a935f5a1c"}, + {file = "orjson-3.10.7-cp38-none-win_amd64.whl", hash = "sha256:a763bc0e58504cc803739e7df040685816145a6f3c8a589787084b54ebc9f16e"}, + {file = "orjson-3.10.7-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:e76be12658a6fa376fcd331b1ea4e58f5a06fd0220653450f0d415b8fd0fbe20"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed350d6978d28b92939bfeb1a0570c523f6170efc3f0a0ef1f1df287cd4f4960"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:144888c76f8520e39bfa121b31fd637e18d4cc2f115727865fdf9fa325b10412"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09b2d92fd95ad2402188cf51573acde57eb269eddabaa60f69ea0d733e789fe9"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5b24a579123fa884f3a3caadaed7b75eb5715ee2b17ab5c66ac97d29b18fe57f"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591bcfe7512353bd609875ab38050efe3d55e18934e2f18950c108334b4ff"}, + {file = "orjson-3.10.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f4db56635b58cd1a200b0a23744ff44206ee6aa428185e2b6c4a65b3197abdcd"}, + {file = "orjson-3.10.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0fa5886854673222618638c6df7718ea7fe2f3f2384c452c9ccedc70b4a510a5"}, + {file = "orjson-3.10.7-cp39-none-win32.whl", hash = "sha256:8272527d08450ab16eb405f47e0f4ef0e5ff5981c3d82afe0efd25dcbef2bcd2"}, + {file = "orjson-3.10.7-cp39-none-win_amd64.whl", hash = "sha256:974683d4618c0c7dbf4f69c95a979734bf183d0658611760017f6e70a145af58"}, + {file = "orjson-3.10.7.tar.gz", hash = "sha256:75ef0640403f945f3a1f9f6400686560dbfb0fb5b16589ad62cd477043c4eee3"}, +] + +[[package]] +name = "packaging" +version = "24.1" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, +] + +[[package]] +name = "pandas" +version = "2.0.3" +description = "Powerful data structures for data analysis, time series, and statistics" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pandas-2.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e4c7c9f27a4185304c7caf96dc7d91bc60bc162221152de697c98eb0b2648dd8"}, + {file = "pandas-2.0.3-cp310-cp310-macosx_11_0_arm64.whl", 
hash = "sha256:f167beed68918d62bffb6ec64f2e1d8a7d297a038f86d4aed056b9493fca407f"}, + {file = "pandas-2.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce0c6f76a0f1ba361551f3e6dceaff06bde7514a374aa43e33b588ec10420183"}, + {file = "pandas-2.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba619e410a21d8c387a1ea6e8a0e49bb42216474436245718d7f2e88a2f8d7c0"}, + {file = "pandas-2.0.3-cp310-cp310-win32.whl", hash = "sha256:3ef285093b4fe5058eefd756100a367f27029913760773c8bf1d2d8bebe5d210"}, + {file = "pandas-2.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:9ee1a69328d5c36c98d8e74db06f4ad518a1840e8ccb94a4ba86920986bb617e"}, + {file = "pandas-2.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b084b91d8d66ab19f5bb3256cbd5ea661848338301940e17f4492b2ce0801fe8"}, + {file = "pandas-2.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:37673e3bdf1551b95bf5d4ce372b37770f9529743d2498032439371fc7b7eb26"}, + {file = "pandas-2.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9cb1e14fdb546396b7e1b923ffaeeac24e4cedd14266c3497216dd4448e4f2d"}, + {file = "pandas-2.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9cd88488cceb7635aebb84809d087468eb33551097d600c6dad13602029c2df"}, + {file = "pandas-2.0.3-cp311-cp311-win32.whl", hash = "sha256:694888a81198786f0e164ee3a581df7d505024fbb1f15202fc7db88a71d84ebd"}, + {file = "pandas-2.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:6a21ab5c89dcbd57f78d0ae16630b090eec626360085a4148693def5452d8a6b"}, + {file = "pandas-2.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9e4da0d45e7f34c069fe4d522359df7d23badf83abc1d1cef398895822d11061"}, + {file = "pandas-2.0.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:32fca2ee1b0d93dd71d979726b12b61faa06aeb93cf77468776287f41ff8fdc5"}, + {file = "pandas-2.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:258d3624b3ae734490e4d63c430256e716f488c4fcb7c8e9bde2d3aa46c29089"}, + {file = "pandas-2.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eae3dc34fa1aa7772dd3fc60270d13ced7346fcbcfee017d3132ec625e23bb0"}, + {file = "pandas-2.0.3-cp38-cp38-win32.whl", hash = "sha256:f3421a7afb1a43f7e38e82e844e2bca9a6d793d66c1a7f9f0ff39a795bbc5e02"}, + {file = "pandas-2.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:69d7f3884c95da3a31ef82b7618af5710dba95bb885ffab339aad925c3e8ce78"}, + {file = "pandas-2.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5247fb1ba347c1261cbbf0fcfba4a3121fbb4029d95d9ef4dc45406620b25c8b"}, + {file = "pandas-2.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:81af086f4543c9d8bb128328b5d32e9986e0c84d3ee673a2ac6fb57fd14f755e"}, + {file = "pandas-2.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1994c789bf12a7c5098277fb43836ce090f1073858c10f9220998ac74f37c69b"}, + {file = "pandas-2.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ec591c48e29226bcbb316e0c1e9423622bc7a4eaf1ef7c3c9fa1a3981f89641"}, + {file = "pandas-2.0.3-cp39-cp39-win32.whl", hash = "sha256:04dbdbaf2e4d46ca8da896e1805bc04eb85caa9a82e259e8eed00254d5e0c682"}, + {file = "pandas-2.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:1168574b036cd8b93abc746171c9b4f1b83467438a5e45909fed645cf8692dbc"}, + {file = "pandas-2.0.3.tar.gz", hash = "sha256:c02f372a88e0d17f36d3093a644c73cfc1788e876a7c4bcb4020a77512e2043c"}, +] + +[package.dependencies] +numpy = [ + {version = ">=1.20.3", markers = "python_version < 
\"3.10\""}, + {version = ">=1.21.0", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, + {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, +] +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.1" + +[package.extras] +all = ["PyQt5 (>=5.15.1)", "SQLAlchemy (>=1.4.16)", "beautifulsoup4 (>=4.9.3)", "bottleneck (>=1.3.2)", "brotlipy (>=0.7.0)", "fastparquet (>=0.6.3)", "fsspec (>=2021.07.0)", "gcsfs (>=2021.07.0)", "html5lib (>=1.1)", "hypothesis (>=6.34.2)", "jinja2 (>=3.0.0)", "lxml (>=4.6.3)", "matplotlib (>=3.6.1)", "numba (>=0.53.1)", "numexpr (>=2.7.3)", "odfpy (>=1.4.1)", "openpyxl (>=3.0.7)", "pandas-gbq (>=0.15.0)", "psycopg2 (>=2.8.6)", "pyarrow (>=7.0.0)", "pymysql (>=1.0.2)", "pyreadstat (>=1.1.2)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)", "python-snappy (>=0.6.0)", "pyxlsb (>=1.0.8)", "qtpy (>=2.2.0)", "s3fs (>=2021.08.0)", "scipy (>=1.7.1)", "tables (>=3.6.1)", "tabulate (>=0.8.9)", "xarray (>=0.21.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=1.4.3)", "zstandard (>=0.15.2)"] +aws = ["s3fs (>=2021.08.0)"] +clipboard = ["PyQt5 (>=5.15.1)", "qtpy (>=2.2.0)"] +compression = ["brotlipy (>=0.7.0)", "python-snappy (>=0.6.0)", "zstandard (>=0.15.2)"] +computation = ["scipy (>=1.7.1)", "xarray (>=0.21.0)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.0.7)", "pyxlsb (>=1.0.8)", "xlrd (>=2.0.1)", "xlsxwriter (>=1.4.3)"] +feather = ["pyarrow (>=7.0.0)"] +fss = ["fsspec (>=2021.07.0)"] +gcp = ["gcsfs (>=2021.07.0)", "pandas-gbq (>=0.15.0)"] +hdf5 = ["tables (>=3.6.1)"] +html = ["beautifulsoup4 (>=4.9.3)", "html5lib (>=1.1)", "lxml (>=4.6.3)"] +mysql = ["SQLAlchemy (>=1.4.16)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.0.0)", "tabulate (>=0.8.9)"] +parquet = ["pyarrow (>=7.0.0)"] +performance = ["bottleneck (>=1.3.2)", "numba (>=0.53.1)", "numexpr (>=2.7.1)"] +plot = ["matplotlib (>=3.6.1)"] +postgresql = ["SQLAlchemy (>=1.4.16)", "psycopg2 (>=2.8.6)"] +spss = ["pyreadstat (>=1.1.2)"] +sql-other = ["SQLAlchemy (>=1.4.16)"] +test = ["hypothesis (>=6.34.2)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.6.3)"] + +[[package]] +name = "pillow" +version = "10.4.0" +description = "Python Imaging Library (Fork)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pillow-10.4.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:4d9667937cfa347525b319ae34375c37b9ee6b525440f3ef48542fcf66f2731e"}, + {file = "pillow-10.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:543f3dc61c18dafb755773efc89aae60d06b6596a63914107f75459cf984164d"}, + {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7928ecbf1ece13956b95d9cbcfc77137652b02763ba384d9ab508099a2eca856"}, + {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4d49b85c4348ea0b31ea63bc75a9f3857869174e2bf17e7aba02945cd218e6f"}, + {file = "pillow-10.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:6c762a5b0997f5659a5ef2266abc1d8851ad7749ad9a6a5506eb23d314e4f46b"}, + {file = "pillow-10.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a985e028fc183bf12a77a8bbf36318db4238a3ded7fa9df1b9a133f1cb79f8fc"}, + {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:812f7342b0eee081eaec84d91423d1b4650bb9828eb53d8511bcef8ce5aecf1e"}, + {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ac1452d2fbe4978c2eec89fb5a23b8387aba707ac72810d9490118817d9c0b46"}, 
+ {file = "pillow-10.4.0-cp310-cp310-win32.whl", hash = "sha256:bcd5e41a859bf2e84fdc42f4edb7d9aba0a13d29a2abadccafad99de3feff984"}, + {file = "pillow-10.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:ecd85a8d3e79cd7158dec1c9e5808e821feea088e2f69a974db5edf84dc53141"}, + {file = "pillow-10.4.0-cp310-cp310-win_arm64.whl", hash = "sha256:ff337c552345e95702c5fde3158acb0625111017d0e5f24bf3acdb9cc16b90d1"}, + {file = "pillow-10.4.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:0a9ec697746f268507404647e531e92889890a087e03681a3606d9b920fbee3c"}, + {file = "pillow-10.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfe91cb65544a1321e631e696759491ae04a2ea11d36715eca01ce07284738be"}, + {file = "pillow-10.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dc6761a6efc781e6a1544206f22c80c3af4c8cf461206d46a1e6006e4429ff3"}, + {file = "pillow-10.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e84b6cc6a4a3d76c153a6b19270b3526a5a8ed6b09501d3af891daa2a9de7d6"}, + {file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:bbc527b519bd3aa9d7f429d152fea69f9ad37c95f0b02aebddff592688998abe"}, + {file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:76a911dfe51a36041f2e756b00f96ed84677cdeb75d25c767f296c1c1eda1319"}, + {file = "pillow-10.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59291fb29317122398786c2d44427bbd1a6d7ff54017075b22be9d21aa59bd8d"}, + {file = "pillow-10.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:416d3a5d0e8cfe4f27f574362435bc9bae57f679a7158e0096ad2beb427b8696"}, + {file = "pillow-10.4.0-cp311-cp311-win32.whl", hash = "sha256:7086cc1d5eebb91ad24ded9f58bec6c688e9f0ed7eb3dbbf1e4800280a896496"}, + {file = "pillow-10.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cbed61494057c0f83b83eb3a310f0bf774b09513307c434d4366ed64f4128a91"}, + {file = "pillow-10.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:f5f0c3e969c8f12dd2bb7e0b15d5c468b51e5017e01e2e867335c81903046a22"}, + {file = "pillow-10.4.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:673655af3eadf4df6b5457033f086e90299fdd7a47983a13827acf7459c15d94"}, + {file = "pillow-10.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:866b6942a92f56300012f5fbac71f2d610312ee65e22f1aa2609e491284e5597"}, + {file = "pillow-10.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29dbdc4207642ea6aad70fbde1a9338753d33fb23ed6956e706936706f52dd80"}, + {file = "pillow-10.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf2342ac639c4cf38799a44950bbc2dfcb685f052b9e262f446482afaf4bffca"}, + {file = "pillow-10.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:f5b92f4d70791b4a67157321c4e8225d60b119c5cc9aee8ecf153aace4aad4ef"}, + {file = "pillow-10.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:86dcb5a1eb778d8b25659d5e4341269e8590ad6b4e8b44d9f4b07f8d136c414a"}, + {file = "pillow-10.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:780c072c2e11c9b2c7ca37f9a2ee8ba66f44367ac3e5c7832afcfe5104fd6d1b"}, + {file = "pillow-10.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:37fb69d905be665f68f28a8bba3c6d3223c8efe1edf14cc4cfa06c241f8c81d9"}, + {file = "pillow-10.4.0-cp312-cp312-win32.whl", hash = "sha256:7dfecdbad5c301d7b5bde160150b4db4c659cee2b69589705b6f8a0c509d9f42"}, + {file = "pillow-10.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1d846aea995ad352d4bdcc847535bd56e0fd88d36829d2c90be880ef1ee4668a"}, + {file = 
"pillow-10.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:e553cad5179a66ba15bb18b353a19020e73a7921296a7979c4a2b7f6a5cd57f9"}, + {file = "pillow-10.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8bc1a764ed8c957a2e9cacf97c8b2b053b70307cf2996aafd70e91a082e70df3"}, + {file = "pillow-10.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6209bb41dc692ddfee4942517c19ee81b86c864b626dbfca272ec0f7cff5d9fb"}, + {file = "pillow-10.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bee197b30783295d2eb680b311af15a20a8b24024a19c3a26431ff83eb8d1f70"}, + {file = "pillow-10.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ef61f5dd14c300786318482456481463b9d6b91ebe5ef12f405afbba77ed0be"}, + {file = "pillow-10.4.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:297e388da6e248c98bc4a02e018966af0c5f92dfacf5a5ca22fa01cb3179bca0"}, + {file = "pillow-10.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:e4db64794ccdf6cb83a59d73405f63adbe2a1887012e308828596100a0b2f6cc"}, + {file = "pillow-10.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd2880a07482090a3bcb01f4265f1936a903d70bc740bfcb1fd4e8a2ffe5cf5a"}, + {file = "pillow-10.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b35b21b819ac1dbd1233317adeecd63495f6babf21b7b2512d244ff6c6ce309"}, + {file = "pillow-10.4.0-cp313-cp313-win32.whl", hash = "sha256:551d3fd6e9dc15e4c1eb6fc4ba2b39c0c7933fa113b220057a34f4bb3268a060"}, + {file = "pillow-10.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:030abdbe43ee02e0de642aee345efa443740aa4d828bfe8e2eb11922ea6a21ea"}, + {file = "pillow-10.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:5b001114dd152cfd6b23befeb28d7aee43553e2402c9f159807bf55f33af8a8d"}, + {file = "pillow-10.4.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:8d4d5063501b6dd4024b8ac2f04962d661222d120381272deea52e3fc52d3736"}, + {file = "pillow-10.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7c1ee6f42250df403c5f103cbd2768a28fe1a0ea1f0f03fe151c8741e1469c8b"}, + {file = "pillow-10.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b15e02e9bb4c21e39876698abf233c8c579127986f8207200bc8a8f6bb27acf2"}, + {file = "pillow-10.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a8d4bade9952ea9a77d0c3e49cbd8b2890a399422258a77f357b9cc9be8d680"}, + {file = "pillow-10.4.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:43efea75eb06b95d1631cb784aa40156177bf9dd5b4b03ff38979e048258bc6b"}, + {file = "pillow-10.4.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:950be4d8ba92aca4b2bb0741285a46bfae3ca699ef913ec8416c1b78eadd64cd"}, + {file = "pillow-10.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d7480af14364494365e89d6fddc510a13e5a2c3584cb19ef65415ca57252fb84"}, + {file = "pillow-10.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:73664fe514b34c8f02452ffb73b7a92c6774e39a647087f83d67f010eb9a0cf0"}, + {file = "pillow-10.4.0-cp38-cp38-win32.whl", hash = "sha256:e88d5e6ad0d026fba7bdab8c3f225a69f063f116462c49892b0149e21b6c0a0e"}, + {file = "pillow-10.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:5161eef006d335e46895297f642341111945e2c1c899eb406882a6c61a4357ab"}, + {file = "pillow-10.4.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0ae24a547e8b711ccaaf99c9ae3cd975470e1a30caa80a6aaee9a2f19c05701d"}, + {file = "pillow-10.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:298478fe4f77a4408895605f3482b6cc6222c018b2ce565c2b6b9c354ac3229b"}, + {file = 
"pillow-10.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:134ace6dc392116566980ee7436477d844520a26a4b1bd4053f6f47d096997fd"}, + {file = "pillow-10.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:930044bb7679ab003b14023138b50181899da3f25de50e9dbee23b61b4de2126"}, + {file = "pillow-10.4.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c76e5786951e72ed3686e122d14c5d7012f16c8303a674d18cdcd6d89557fc5b"}, + {file = "pillow-10.4.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b2724fdb354a868ddf9a880cb84d102da914e99119211ef7ecbdc613b8c96b3c"}, + {file = "pillow-10.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dbc6ae66518ab3c5847659e9988c3b60dc94ffb48ef9168656e0019a93dbf8a1"}, + {file = "pillow-10.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:06b2f7898047ae93fad74467ec3d28fe84f7831370e3c258afa533f81ef7f3df"}, + {file = "pillow-10.4.0-cp39-cp39-win32.whl", hash = "sha256:7970285ab628a3779aecc35823296a7869f889b8329c16ad5a71e4901a3dc4ef"}, + {file = "pillow-10.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:961a7293b2457b405967af9c77dcaa43cc1a8cd50d23c532e62d48ab6cdd56f5"}, + {file = "pillow-10.4.0-cp39-cp39-win_arm64.whl", hash = "sha256:32cda9e3d601a52baccb2856b8ea1fc213c90b340c542dcef77140dfa3278a9e"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5b4815f2e65b30f5fbae9dfffa8636d992d49705723fe86a3661806e069352d4"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8f0aef4ef59694b12cadee839e2ba6afeab89c0f39a3adc02ed51d109117b8da"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f4727572e2918acaa9077c919cbbeb73bd2b3ebcfe033b72f858fc9fbef0026"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff25afb18123cea58a591ea0244b92eb1e61a1fd497bf6d6384f09bc3262ec3e"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dc3e2db6ba09ffd7d02ae9141cfa0ae23393ee7687248d46a7507b75d610f4f5"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:02a2be69f9c9b8c1e97cf2713e789d4e398c751ecfd9967c18d0ce304efbf885"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0755ffd4a0c6f267cccbae2e9903d95477ca2f77c4fcf3a3a09570001856c8a5"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:a02364621fe369e06200d4a16558e056fe2805d3468350df3aef21e00d26214b"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:1b5dea9831a90e9d0721ec417a80d4cbd7022093ac38a568db2dd78363b00908"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b885f89040bb8c4a1573566bbb2f44f5c505ef6e74cec7ab9068c900047f04b"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87dd88ded2e6d74d31e1e0a99a726a6765cda32d00ba72dc37f0651f306daaa8"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:2db98790afc70118bd0255c2eeb465e9767ecf1f3c25f9a1abb8ffc8cfd1fe0a"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f7baece4ce06bade126fb84b8af1c33439a76d8a6fd818970215e0560ca28c27"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:cfdd747216947628af7b259d274771d84db2268ca062dd5faf373639d00113a3"}, + {file = "pillow-10.4.0.tar.gz", 
hash = "sha256:166c1cd4d24309b30d61f79f4a9114b7b2313d7450912277855ff5dfd7cd4a06"}, +] + +[package.extras] +docs = ["furo", "olefile", "sphinx (>=7.3)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"] +fpx = ["olefile"] +mic = ["olefile"] +tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] +typing = ["typing-extensions"] +xmp = ["defusedxml"] + +[[package]] +name = "pluggy" +version = "1.5.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "protobuf" +version = "5.27.3" +description = "" +optional = false +python-versions = ">=3.8" +files = [ + {file = "protobuf-5.27.3-cp310-abi3-win32.whl", hash = "sha256:dcb307cd4ef8fec0cf52cb9105a03d06fbb5275ce6d84a6ae33bc6cf84e0a07b"}, + {file = "protobuf-5.27.3-cp310-abi3-win_amd64.whl", hash = "sha256:16ddf3f8c6c41e1e803da7abea17b1793a97ef079a912e42351eabb19b2cffe7"}, + {file = "protobuf-5.27.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:68248c60d53f6168f565a8c76dc58ba4fa2ade31c2d1ebdae6d80f969cdc2d4f"}, + {file = "protobuf-5.27.3-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:b8a994fb3d1c11156e7d1e427186662b64694a62b55936b2b9348f0a7c6625ce"}, + {file = "protobuf-5.27.3-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:a55c48f2a2092d8e213bd143474df33a6ae751b781dd1d1f4d953c128a415b25"}, + {file = "protobuf-5.27.3-cp38-cp38-win32.whl", hash = "sha256:043853dcb55cc262bf2e116215ad43fa0859caab79bb0b2d31b708f128ece035"}, + {file = "protobuf-5.27.3-cp38-cp38-win_amd64.whl", hash = "sha256:c2a105c24f08b1e53d6c7ffe69cb09d0031512f0b72f812dd4005b8112dbe91e"}, + {file = "protobuf-5.27.3-cp39-cp39-win32.whl", hash = "sha256:c84eee2c71ed83704f1afbf1a85c3171eab0fd1ade3b399b3fad0884cbcca8bf"}, + {file = "protobuf-5.27.3-cp39-cp39-win_amd64.whl", hash = "sha256:af7c0b7cfbbb649ad26132e53faa348580f844d9ca46fd3ec7ca48a1ea5db8a1"}, + {file = "protobuf-5.27.3-py3-none-any.whl", hash = "sha256:8572c6533e544ebf6899c360e91d6bcbbee2549251643d32c52cf8a5de295ba5"}, + {file = "protobuf-5.27.3.tar.gz", hash = "sha256:82460903e640f2b7e34ee81a947fdaad89de796d324bcbc38ff5430bcdead82c"}, +] + +[[package]] +name = "pyarrow" +version = "15.0.2" +description = "Python library for Apache Arrow" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyarrow-15.0.2-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:88b340f0a1d05b5ccc3d2d986279045655b1fe8e41aba6ca44ea28da0d1455d8"}, + {file = "pyarrow-15.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eaa8f96cecf32da508e6c7f69bb8401f03745c050c1dd42ec2596f2e98deecac"}, + {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23c6753ed4f6adb8461e7c383e418391b8d8453c5d67e17f416c3a5d5709afbd"}, + {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f639c059035011db8c0497e541a8a45d98a58dbe34dc8fadd0ef128f2cee46e5"}, + {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:290e36a59a0993e9a5224ed2fb3e53375770f07379a0ea03ee2fce2e6d30b423"}, + {file = 
"pyarrow-15.0.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:06c2bb2a98bc792f040bef31ad3e9be6a63d0cb39189227c08a7d955db96816e"}, + {file = "pyarrow-15.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:f7a197f3670606a960ddc12adbe8075cea5f707ad7bf0dffa09637fdbb89f76c"}, + {file = "pyarrow-15.0.2-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:5f8bc839ea36b1f99984c78e06e7a06054693dc2af8920f6fb416b5bca9944e4"}, + {file = "pyarrow-15.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f5e81dfb4e519baa6b4c80410421528c214427e77ca0ea9461eb4097c328fa33"}, + {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a4f240852b302a7af4646c8bfe9950c4691a419847001178662a98915fd7ee7"}, + {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e7d9cfb5a1e648e172428c7a42b744610956f3b70f524aa3a6c02a448ba853e"}, + {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2d4f905209de70c0eb5b2de6763104d5a9a37430f137678edfb9a675bac9cd98"}, + {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:90adb99e8ce5f36fbecbbc422e7dcbcbed07d985eed6062e459e23f9e71fd197"}, + {file = "pyarrow-15.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:b116e7fd7889294cbd24eb90cd9bdd3850be3738d61297855a71ac3b8124ee38"}, + {file = "pyarrow-15.0.2-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:25335e6f1f07fdaa026a61c758ee7d19ce824a866b27bba744348fa73bb5a440"}, + {file = "pyarrow-15.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:90f19e976d9c3d8e73c80be84ddbe2f830b6304e4c576349d9360e335cd627fc"}, + {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a22366249bf5fd40ddacc4f03cd3160f2d7c247692945afb1899bab8a140ddfb"}, + {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2a335198f886b07e4b5ea16d08ee06557e07db54a8400cc0d03c7f6a22f785f"}, + {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:3e6d459c0c22f0b9c810a3917a1de3ee704b021a5fb8b3bacf968eece6df098f"}, + {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:033b7cad32198754d93465dcfb71d0ba7cb7cd5c9afd7052cab7214676eec38b"}, + {file = "pyarrow-15.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:29850d050379d6e8b5a693098f4de7fd6a2bea4365bfd073d7c57c57b95041ee"}, + {file = "pyarrow-15.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:7167107d7fb6dcadb375b4b691b7e316f4368f39f6f45405a05535d7ad5e5058"}, + {file = "pyarrow-15.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e85241b44cc3d365ef950432a1b3bd44ac54626f37b2e3a0cc89c20e45dfd8bf"}, + {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:248723e4ed3255fcd73edcecc209744d58a9ca852e4cf3d2577811b6d4b59818"}, + {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ff3bdfe6f1b81ca5b73b70a8d482d37a766433823e0c21e22d1d7dde76ca33f"}, + {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:f3d77463dee7e9f284ef42d341689b459a63ff2e75cee2b9302058d0d98fe142"}, + {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:8c1faf2482fb89766e79745670cbca04e7018497d85be9242d5350cba21357e1"}, + {file = "pyarrow-15.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:28f3016958a8e45a1069303a4a4f6a7d4910643fc08adb1e2e4a7ff056272ad3"}, + {file = "pyarrow-15.0.2-cp39-cp39-macosx_10_15_x86_64.whl", hash = 
"sha256:89722cb64286ab3d4daf168386f6968c126057b8c7ec3ef96302e81d8cdb8ae4"}, + {file = "pyarrow-15.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cd0ba387705044b3ac77b1b317165c0498299b08261d8122c96051024f953cd5"}, + {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad2459bf1f22b6a5cdcc27ebfd99307d5526b62d217b984b9f5c974651398832"}, + {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58922e4bfece8b02abf7159f1f53a8f4d9f8e08f2d988109126c17c3bb261f22"}, + {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:adccc81d3dc0478ea0b498807b39a8d41628fa9210729b2f718b78cb997c7c91"}, + {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:8bd2baa5fe531571847983f36a30ddbf65261ef23e496862ece83bdceb70420d"}, + {file = "pyarrow-15.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6669799a1d4ca9da9c7e06ef48368320f5856f36f9a4dd31a11839dda3f6cc8c"}, + {file = "pyarrow-15.0.2.tar.gz", hash = "sha256:9c9bc803cb3b7bfacc1e96ffbfd923601065d9d3f911179d81e72d99fd74a3d9"}, +] + +[package.dependencies] +numpy = ">=1.16.6,<2" + +[[package]] +name = "pyasn1" +version = "0.6.0" +description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyasn1-0.6.0-py2.py3-none-any.whl", hash = "sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473"}, + {file = "pyasn1-0.6.0.tar.gz", hash = "sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c"}, +] + +[[package]] +name = "pyasn1-modules" +version = "0.4.0" +description = "A collection of ASN.1-based protocols modules" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyasn1_modules-0.4.0-py3-none-any.whl", hash = "sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b"}, + {file = "pyasn1_modules-0.4.0.tar.gz", hash = "sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6"}, +] + +[package.dependencies] +pyasn1 = ">=0.4.6,<0.7.0" + +[[package]] +name = "pydantic" +version = "2.8.2" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic-2.8.2-py3-none-any.whl", hash = "sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8"}, + {file = "pydantic-2.8.2.tar.gz", hash = "sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a"}, +] + +[package.dependencies] +annotated-types = ">=0.4.0" +pydantic-core = "2.20.1" +typing-extensions = {version = ">=4.6.1", markers = "python_version < \"3.13\""} + +[package.extras] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.20.1" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_core-2.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3"}, + {file = "pydantic_core-2.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840"}, + {file = "pydantic_core-2.20.1-cp310-none-win32.whl", hash = "sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250"}, + {file = "pydantic_core-2.20.1-cp310-none-win_amd64.whl", hash = "sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c"}, + {file = "pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312"}, + {file = "pydantic_core-2.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b"}, + {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27"}, + {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b"}, + {file = "pydantic_core-2.20.1-cp311-none-win32.whl", hash = "sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a"}, + {file = "pydantic_core-2.20.1-cp311-none-win_amd64.whl", hash = "sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2"}, + {file = "pydantic_core-2.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231"}, + {file = "pydantic_core-2.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24"}, + {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1"}, + {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd"}, + {file = "pydantic_core-2.20.1-cp312-none-win32.whl", hash = "sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688"}, + {file = "pydantic_core-2.20.1-cp312-none-win_amd64.whl", hash = "sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d"}, + {file = "pydantic_core-2.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686"}, + {file = "pydantic_core-2.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83"}, + {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203"}, + {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0"}, + {file = "pydantic_core-2.20.1-cp313-none-win32.whl", hash = "sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e"}, + {file = 
"pydantic_core-2.20.1-cp313-none-win_amd64.whl", hash = "sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20"}, + {file = "pydantic_core-2.20.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4745f4ac52cc6686390c40eaa01d48b18997cb130833154801a442323cc78f91"}, + {file = "pydantic_core-2.20.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a8ad4c766d3f33ba8fd692f9aa297c9058970530a32c728a2c4bfd2616d3358b"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41e81317dd6a0127cabce83c0c9c3fbecceae981c8391e6f1dec88a77c8a569a"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04024d270cf63f586ad41fff13fde4311c4fc13ea74676962c876d9577bcc78f"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eaad4ff2de1c3823fddf82f41121bdf453d922e9a238642b1dedb33c4e4f98ad"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26ab812fa0c845df815e506be30337e2df27e88399b985d0bb4e3ecfe72df31c"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c5ebac750d9d5f2706654c638c041635c385596caf68f81342011ddfa1e5598"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2aafc5a503855ea5885559eae883978c9b6d8c8993d67766ee73d82e841300dd"}, + {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4868f6bd7c9d98904b748a2653031fc9c2f85b6237009d475b1008bfaeb0a5aa"}, + {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa2f457b4af386254372dfa78a2eda2563680d982422641a85f271c859df1987"}, + {file = "pydantic_core-2.20.1-cp38-none-win32.whl", hash = "sha256:225b67a1f6d602de0ce7f6c1c3ae89a4aa25d3de9be857999e9124f15dab486a"}, + {file = "pydantic_core-2.20.1-cp38-none-win_amd64.whl", hash = "sha256:6b507132dcfc0dea440cce23ee2182c0ce7aba7054576efc65634f080dbe9434"}, + {file = "pydantic_core-2.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c"}, + {file = "pydantic_core-2.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1"}, + {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09"}, + {file = 
"pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab"}, + {file = "pydantic_core-2.20.1-cp39-none-win32.whl", hash = "sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2"}, + {file = "pydantic_core-2.20.1-cp39-none-win_amd64.whl", hash = "sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"}, + {file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pyparsing" +version = "3.1.2" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +optional = false +python-versions = ">=3.6.8" +files = [ + {file = "pyparsing-3.1.2-py3-none-any.whl", hash = 
"sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"}, + {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"}, +] + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] + +[[package]] +name = "pytest" +version = "7.4.4" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-asyncio" +version = "0.23.8" +description = "Pytest support for asyncio" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest_asyncio-0.23.8-py3-none-any.whl", hash = "sha256:50265d892689a5faefb84df80819d1ecef566eb3549cf915dfb33569359d1ce2"}, + {file = "pytest_asyncio-0.23.8.tar.gz", hash = "sha256:759b10b33a6dc61cce40a8bd5205e302978bbbcc00e279a8b61d9a6a3c82e4d3"}, +] + +[package.dependencies] +pytest = ">=7.0.0,<9" + +[package.extras] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] +testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] + +[[package]] +name = "pytest-socket" +version = "0.7.0" +description = "Pytest Plugin to disable socket calls during tests" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pytest_socket-0.7.0-py3-none-any.whl", hash = "sha256:7e0f4642177d55d317bbd58fc68c6bd9048d6eadb2d46a89307fa9221336ce45"}, + {file = "pytest_socket-0.7.0.tar.gz", hash = "sha256:71ab048cbbcb085c15a4423b73b619a8b35d6a307f46f78ea46be51b1b7e11b3"}, +] + +[package.dependencies] +pytest = ">=6.2.5" + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pytz" +version = "2024.1" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, +] + +[[package]] +name = "pywin32" +version = "306" +description = "Python for Window Extensions" +optional = false +python-versions = "*" +files = [ + {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, + {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = 
"sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, + {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, + {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, + {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, + {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, + {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, + {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, + {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, + {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, + {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, + {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, + {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, + {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.2" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = 
"PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, +] + +[[package]] +name = "querystring-parser" +version = "1.2.4" +description = "QueryString parser for Python/Django that correctly handles nested dictionaries" +optional = false +python-versions = "*" +files = [ + {file = "querystring_parser-1.2.4-py2.py3-none-any.whl", hash = "sha256:d2fa90765eaf0de96c8b087872991a10238e89ba015ae59fedfed6bd61c242a0"}, + {file = "querystring_parser-1.2.4.tar.gz", hash = "sha256:644fce1cffe0530453b43a83a38094dbe422ccba8c9b2f2a1c00280e14ca8a62"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "requests" +version = "2.32.3" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "rsa" +version = "4.9" +description = "Pure-Python RSA implementation" +optional = false +python-versions = ">=3.6,<4" +files = [ + {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, + {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, +] + +[package.dependencies] +pyasn1 = ">=0.1.3" + +[[package]] +name = "ruff" +version = "0.5.7" +description = "An extremely fast Python linter and code formatter, written in Rust." +optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.5.7-py3-none-linux_armv6l.whl", hash = "sha256:548992d342fc404ee2e15a242cdbea4f8e39a52f2e7752d0e4cbe88d2d2f416a"}, + {file = "ruff-0.5.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:00cc8872331055ee017c4f1071a8a31ca0809ccc0657da1d154a1d2abac5c0be"}, + {file = "ruff-0.5.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:eaf3d86a1fdac1aec8a3417a63587d93f906c678bb9ed0b796da7b59c1114a1e"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a01c34400097b06cf8a6e61b35d6d456d5bd1ae6961542de18ec81eaf33b4cb8"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcc8054f1a717e2213500edaddcf1dbb0abad40d98e1bd9d0ad364f75c763eea"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f70284e73f36558ef51602254451e50dd6cc479f8b6f8413a95fcb5db4a55fc"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:a78ad870ae3c460394fc95437d43deb5c04b5c29297815a2a1de028903f19692"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ccd078c66a8e419475174bfe60a69adb36ce04f8d4e91b006f1329d5cd44bcf"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e31c9bad4ebf8fdb77b59cae75814440731060a09a0e0077d559a556453acbb"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d796327eed8e168164346b769dd9a27a70e0298d667b4ecee6877ce8095ec8e"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:4a09ea2c3f7778cc635e7f6edf57d566a8ee8f485f3c4454db7771efb692c499"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a36d8dcf55b3a3bc353270d544fb170d75d2dff41eba5df57b4e0b67a95bb64e"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9369c218f789eefbd1b8d82a8cf25017b523ac47d96b2f531eba73770971c9e5"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b88ca3db7eb377eb24fb7c82840546fb7acef75af4a74bd36e9ceb37a890257e"}, + {file = "ruff-0.5.7-py3-none-win32.whl", hash = "sha256:33d61fc0e902198a3e55719f4be6b375b28f860b09c281e4bdbf783c0566576a"}, + {file = "ruff-0.5.7-py3-none-win_amd64.whl", hash = "sha256:083bbcbe6fadb93cd86709037acc510f86eed5a314203079df174c40bbbca6b3"}, + {file = 
"ruff-0.5.7-py3-none-win_arm64.whl", hash = "sha256:2dca26154ff9571995107221d0aeaad0e75a77b5a682d6236cf89a58c70b76f4"}, + {file = "ruff-0.5.7.tar.gz", hash = "sha256:8dfc0a458797f5d9fb622dd0efc52d796f23f0a1493a9527f4e49a550ae9a7e5"}, +] + +[[package]] +name = "scikit-learn" +version = "1.3.2" +description = "A set of python modules for machine learning and data mining" +optional = false +python-versions = ">=3.8" +files = [ + {file = "scikit-learn-1.3.2.tar.gz", hash = "sha256:a2f54c76accc15a34bfb9066e6c7a56c1e7235dda5762b990792330b52ccfb05"}, + {file = "scikit_learn-1.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e326c0eb5cf4d6ba40f93776a20e9a7a69524c4db0757e7ce24ba222471ee8a1"}, + {file = "scikit_learn-1.3.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:535805c2a01ccb40ca4ab7d081d771aea67e535153e35a1fd99418fcedd1648a"}, + {file = "scikit_learn-1.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1215e5e58e9880b554b01187b8c9390bf4dc4692eedeaf542d3273f4785e342c"}, + {file = "scikit_learn-1.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ee107923a623b9f517754ea2f69ea3b62fc898a3641766cb7deb2f2ce450161"}, + {file = "scikit_learn-1.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:35a22e8015048c628ad099da9df5ab3004cdbf81edc75b396fd0cff8699ac58c"}, + {file = "scikit_learn-1.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6fb6bc98f234fda43163ddbe36df8bcde1d13ee176c6dc9b92bb7d3fc842eb66"}, + {file = "scikit_learn-1.3.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:18424efee518a1cde7b0b53a422cde2f6625197de6af36da0b57ec502f126157"}, + {file = "scikit_learn-1.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3271552a5eb16f208a6f7f617b8cc6d1f137b52c8a1ef8edf547db0259b2c9fb"}, + {file = "scikit_learn-1.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc4144a5004a676d5022b798d9e573b05139e77f271253a4703eed295bde0433"}, + {file = "scikit_learn-1.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:67f37d708f042a9b8d59551cf94d30431e01374e00dc2645fa186059c6c5d78b"}, + {file = "scikit_learn-1.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8db94cd8a2e038b37a80a04df8783e09caac77cbe052146432e67800e430c028"}, + {file = "scikit_learn-1.3.2-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:61a6efd384258789aa89415a410dcdb39a50e19d3d8410bd29be365bcdd512d5"}, + {file = "scikit_learn-1.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb06f8dce3f5ddc5dee1715a9b9f19f20d295bed8e3cd4fa51e1d050347de525"}, + {file = "scikit_learn-1.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b2de18d86f630d68fe1f87af690d451388bb186480afc719e5f770590c2ef6c"}, + {file = "scikit_learn-1.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:0402638c9a7c219ee52c94cbebc8fcb5eb9fe9c773717965c1f4185588ad3107"}, + {file = "scikit_learn-1.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a19f90f95ba93c1a7f7924906d0576a84da7f3b2282ac3bfb7a08a32801add93"}, + {file = "scikit_learn-1.3.2-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:b8692e395a03a60cd927125eef3a8e3424d86dde9b2370d544f0ea35f78a8073"}, + {file = "scikit_learn-1.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15e1e94cc23d04d39da797ee34236ce2375ddea158b10bee3c343647d615581d"}, + {file = "scikit_learn-1.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:785a2213086b7b1abf037aeadbbd6d67159feb3e30263434139c98425e3dcfcf"}, + {file = "scikit_learn-1.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:64381066f8aa63c2710e6b56edc9f0894cc7bf59bd71b8ce5613a4559b6145e0"}, + {file = "scikit_learn-1.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6c43290337f7a4b969d207e620658372ba3c1ffb611f8bc2b6f031dc5c6d1d03"}, + {file = "scikit_learn-1.3.2-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:dc9002fc200bed597d5d34e90c752b74df516d592db162f756cc52836b38fe0e"}, + {file = "scikit_learn-1.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d08ada33e955c54355d909b9c06a4789a729977f165b8bae6f225ff0a60ec4a"}, + {file = "scikit_learn-1.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:763f0ae4b79b0ff9cca0bf3716bcc9915bdacff3cebea15ec79652d1cc4fa5c9"}, + {file = "scikit_learn-1.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:ed932ea780517b00dae7431e031faae6b49b20eb6950918eb83bd043237950e0"}, +] + +[package.dependencies] +joblib = ">=1.1.1" +numpy = ">=1.17.3,<2.0" +scipy = ">=1.5.0" +threadpoolctl = ">=2.0.0" + +[package.extras] +benchmark = ["matplotlib (>=3.1.3)", "memory-profiler (>=0.57.0)", "pandas (>=1.0.5)"] +docs = ["Pillow (>=7.1.2)", "matplotlib (>=3.1.3)", "memory-profiler (>=0.57.0)", "numpydoc (>=1.2.0)", "pandas (>=1.0.5)", "plotly (>=5.14.0)", "pooch (>=1.6.0)", "scikit-image (>=0.16.2)", "seaborn (>=0.9.0)", "sphinx (>=6.0.0)", "sphinx-copybutton (>=0.5.2)", "sphinx-gallery (>=0.10.1)", "sphinx-prompt (>=1.3.0)", "sphinxext-opengraph (>=0.4.2)"] +examples = ["matplotlib (>=3.1.3)", "pandas (>=1.0.5)", "plotly (>=5.14.0)", "pooch (>=1.6.0)", "scikit-image (>=0.16.2)", "seaborn (>=0.9.0)"] +tests = ["black (>=23.3.0)", "matplotlib (>=3.1.3)", "mypy (>=1.3)", "numpydoc (>=1.2.0)", "pandas (>=1.0.5)", "pooch (>=1.6.0)", "pyamg (>=4.0.0)", "pytest (>=7.1.2)", "pytest-cov (>=2.9.0)", "ruff (>=0.0.272)", "scikit-image (>=0.16.2)"] + +[[package]] +name = "scipy" +version = "1.10.1" +description = "Fundamental algorithms for scientific computing in Python" +optional = false +python-versions = "<3.12,>=3.8" +files = [ + {file = "scipy-1.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e7354fd7527a4b0377ce55f286805b34e8c54b91be865bac273f527e1b839019"}, + {file = "scipy-1.10.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:4b3f429188c66603a1a5c549fb414e4d3bdc2a24792e061ffbd607d3d75fd84e"}, + {file = "scipy-1.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1553b5dcddd64ba9a0d95355e63fe6c3fc303a8fd77c7bc91e77d61363f7433f"}, + {file = "scipy-1.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c0ff64b06b10e35215abce517252b375e580a6125fd5fdf6421b98efbefb2d2"}, + {file = "scipy-1.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:fae8a7b898c42dffe3f7361c40d5952b6bf32d10c4569098d276b4c547905ee1"}, + {file = "scipy-1.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0f1564ea217e82c1bbe75ddf7285ba0709ecd503f048cb1236ae9995f64217bd"}, + {file = "scipy-1.10.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:d925fa1c81b772882aa55bcc10bf88324dadb66ff85d548c71515f6689c6dac5"}, + {file = "scipy-1.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaea0a6be54462ec027de54fca511540980d1e9eea68b2d5c1dbfe084797be35"}, + {file = "scipy-1.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15a35c4242ec5f292c3dd364a7c71a61be87a3d4ddcc693372813c0b73c9af1d"}, + {file = 
"scipy-1.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:43b8e0bcb877faf0abfb613d51026cd5cc78918e9530e375727bf0625c82788f"}, + {file = "scipy-1.10.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5678f88c68ea866ed9ebe3a989091088553ba12c6090244fdae3e467b1139c35"}, + {file = "scipy-1.10.1-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:39becb03541f9e58243f4197584286e339029e8908c46f7221abeea4b749fa88"}, + {file = "scipy-1.10.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bce5869c8d68cf383ce240e44c1d9ae7c06078a9396df68ce88a1230f93a30c1"}, + {file = "scipy-1.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07c3457ce0b3ad5124f98a86533106b643dd811dd61b548e78cf4c8786652f6f"}, + {file = "scipy-1.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:049a8bbf0ad95277ffba9b3b7d23e5369cc39e66406d60422c8cfef40ccc8415"}, + {file = "scipy-1.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cd9f1027ff30d90618914a64ca9b1a77a431159df0e2a195d8a9e8a04c78abf9"}, + {file = "scipy-1.10.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:79c8e5a6c6ffaf3a2262ef1be1e108a035cf4f05c14df56057b64acc5bebffb6"}, + {file = "scipy-1.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51af417a000d2dbe1ec6c372dfe688e041a7084da4fdd350aeb139bd3fb55353"}, + {file = "scipy-1.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b4735d6c28aad3cdcf52117e0e91d6b39acd4272f3f5cd9907c24ee931ad601"}, + {file = "scipy-1.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:7ff7f37b1bf4417baca958d254e8e2875d0cc23aaadbe65b3d5b3077b0eb23ea"}, + {file = "scipy-1.10.1.tar.gz", hash = "sha256:2cf9dfb80a7b4589ba4c40ce7588986d6d5cebc5457cad2c2880f6bc2d42f3a5"}, +] + +[package.dependencies] +numpy = ">=1.19.5,<1.27.0" + +[package.extras] +dev = ["click", "doit (>=0.36.0)", "flake8", "mypy", "pycodestyle", "pydevtool", "rich-click", "typing_extensions"] +doc = ["matplotlib (>2)", "numpydoc", "pydata-sphinx-theme (==0.9.0)", "sphinx (!=4.1.0)", "sphinx-design (>=0.2.0)"] +test = ["asv", "gmpy2", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "smmap" +version = "5.0.1" +description = "A pure Python implementation of a sliding window memory map manager" +optional = false +python-versions = ">=3.7" +files = [ + {file = "smmap-5.0.1-py3-none-any.whl", hash = "sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da"}, + {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"}, +] + +[[package]] +name = "sqlalchemy" +version = "2.0.32" +description = "Database Abstraction Library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "SQLAlchemy-2.0.32-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0c9045ecc2e4db59bfc97b20516dfdf8e41d910ac6fb667ebd3a79ea54084619"}, + {file = "SQLAlchemy-2.0.32-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1467940318e4a860afd546ef61fefb98a14d935cd6817ed07a228c7f7c62f389"}, + {file = 
"SQLAlchemy-2.0.32-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5954463675cb15db8d4b521f3566a017c8789222b8316b1e6934c811018ee08b"}, + {file = "SQLAlchemy-2.0.32-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:167e7497035c303ae50651b351c28dc22a40bb98fbdb8468cdc971821b1ae533"}, + {file = "SQLAlchemy-2.0.32-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b27dfb676ac02529fb6e343b3a482303f16e6bc3a4d868b73935b8792edb52d0"}, + {file = "SQLAlchemy-2.0.32-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bf2360a5e0f7bd75fa80431bf8ebcfb920c9f885e7956c7efde89031695cafb8"}, + {file = "SQLAlchemy-2.0.32-cp310-cp310-win32.whl", hash = "sha256:306fe44e754a91cd9d600a6b070c1f2fadbb4a1a257b8781ccf33c7067fd3e4d"}, + {file = "SQLAlchemy-2.0.32-cp310-cp310-win_amd64.whl", hash = "sha256:99db65e6f3ab42e06c318f15c98f59a436f1c78179e6a6f40f529c8cc7100b22"}, + {file = "SQLAlchemy-2.0.32-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:21b053be28a8a414f2ddd401f1be8361e41032d2ef5884b2f31d31cb723e559f"}, + {file = "SQLAlchemy-2.0.32-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b178e875a7a25b5938b53b006598ee7645172fccafe1c291a706e93f48499ff5"}, + {file = "SQLAlchemy-2.0.32-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723a40ee2cc7ea653645bd4cf024326dea2076673fc9d3d33f20f6c81db83e1d"}, + {file = "SQLAlchemy-2.0.32-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:295ff8689544f7ee7e819529633d058bd458c1fd7f7e3eebd0f9268ebc56c2a0"}, + {file = "SQLAlchemy-2.0.32-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:49496b68cd190a147118af585173ee624114dfb2e0297558c460ad7495f9dfe2"}, + {file = "SQLAlchemy-2.0.32-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:acd9b73c5c15f0ec5ce18128b1fe9157ddd0044abc373e6ecd5ba376a7e5d961"}, + {file = "SQLAlchemy-2.0.32-cp311-cp311-win32.whl", hash = "sha256:9365a3da32dabd3e69e06b972b1ffb0c89668994c7e8e75ce21d3e5e69ddef28"}, + {file = "SQLAlchemy-2.0.32-cp311-cp311-win_amd64.whl", hash = "sha256:8bd63d051f4f313b102a2af1cbc8b80f061bf78f3d5bd0843ff70b5859e27924"}, + {file = "SQLAlchemy-2.0.32-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6bab3db192a0c35e3c9d1560eb8332463e29e5507dbd822e29a0a3c48c0a8d92"}, + {file = "SQLAlchemy-2.0.32-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:19d98f4f58b13900d8dec4ed09dd09ef292208ee44cc9c2fe01c1f0a2fe440e9"}, + {file = "SQLAlchemy-2.0.32-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cd33c61513cb1b7371fd40cf221256456d26a56284e7d19d1f0b9f1eb7dd7e8"}, + {file = "SQLAlchemy-2.0.32-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d6ba0497c1d066dd004e0f02a92426ca2df20fac08728d03f67f6960271feec"}, + {file = "SQLAlchemy-2.0.32-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2b6be53e4fde0065524f1a0a7929b10e9280987b320716c1509478b712a7688c"}, + {file = "SQLAlchemy-2.0.32-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:916a798f62f410c0b80b63683c8061f5ebe237b0f4ad778739304253353bc1cb"}, + {file = "SQLAlchemy-2.0.32-cp312-cp312-win32.whl", hash = "sha256:31983018b74908ebc6c996a16ad3690301a23befb643093fcfe85efd292e384d"}, + {file = "SQLAlchemy-2.0.32-cp312-cp312-win_amd64.whl", hash = "sha256:4363ed245a6231f2e2957cccdda3c776265a75851f4753c60f3004b90e69bfeb"}, + {file = "SQLAlchemy-2.0.32-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b8afd5b26570bf41c35c0121801479958b4446751a3971fb9a480c1afd85558e"}, + {file = 
"SQLAlchemy-2.0.32-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c750987fc876813f27b60d619b987b057eb4896b81117f73bb8d9918c14f1cad"}, + {file = "SQLAlchemy-2.0.32-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ada0102afff4890f651ed91120c1120065663506b760da4e7823913ebd3258be"}, + {file = "SQLAlchemy-2.0.32-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:78c03d0f8a5ab4f3034c0e8482cfcc415a3ec6193491cfa1c643ed707d476f16"}, + {file = "SQLAlchemy-2.0.32-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:3bd1cae7519283ff525e64645ebd7a3e0283f3c038f461ecc1c7b040a0c932a1"}, + {file = "SQLAlchemy-2.0.32-cp37-cp37m-win32.whl", hash = "sha256:01438ebcdc566d58c93af0171c74ec28efe6a29184b773e378a385e6215389da"}, + {file = "SQLAlchemy-2.0.32-cp37-cp37m-win_amd64.whl", hash = "sha256:4979dc80fbbc9d2ef569e71e0896990bc94df2b9fdbd878290bd129b65ab579c"}, + {file = "SQLAlchemy-2.0.32-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c742be912f57586ac43af38b3848f7688863a403dfb220193a882ea60e1ec3a"}, + {file = "SQLAlchemy-2.0.32-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:62e23d0ac103bcf1c5555b6c88c114089587bc64d048fef5bbdb58dfd26f96da"}, + {file = "SQLAlchemy-2.0.32-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:251f0d1108aab8ea7b9aadbd07fb47fb8e3a5838dde34aa95a3349876b5a1f1d"}, + {file = "SQLAlchemy-2.0.32-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ef18a84e5116340e38eca3e7f9eeaaef62738891422e7c2a0b80feab165905f"}, + {file = "SQLAlchemy-2.0.32-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:3eb6a97a1d39976f360b10ff208c73afb6a4de86dd2a6212ddf65c4a6a2347d5"}, + {file = "SQLAlchemy-2.0.32-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0c1c9b673d21477cec17ab10bc4decb1322843ba35b481585facd88203754fc5"}, + {file = "SQLAlchemy-2.0.32-cp38-cp38-win32.whl", hash = "sha256:c41a2b9ca80ee555decc605bd3c4520cc6fef9abde8fd66b1cf65126a6922d65"}, + {file = "SQLAlchemy-2.0.32-cp38-cp38-win_amd64.whl", hash = "sha256:8a37e4d265033c897892279e8adf505c8b6b4075f2b40d77afb31f7185cd6ecd"}, + {file = "SQLAlchemy-2.0.32-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:52fec964fba2ef46476312a03ec8c425956b05c20220a1a03703537824b5e8e1"}, + {file = "SQLAlchemy-2.0.32-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:328429aecaba2aee3d71e11f2477c14eec5990fb6d0e884107935f7fb6001632"}, + {file = "SQLAlchemy-2.0.32-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85a01b5599e790e76ac3fe3aa2f26e1feba56270023d6afd5550ed63c68552b3"}, + {file = "SQLAlchemy-2.0.32-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aaf04784797dcdf4c0aa952c8d234fa01974c4729db55c45732520ce12dd95b4"}, + {file = "SQLAlchemy-2.0.32-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4488120becf9b71b3ac718f4138269a6be99a42fe023ec457896ba4f80749525"}, + {file = "SQLAlchemy-2.0.32-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:14e09e083a5796d513918a66f3d6aedbc131e39e80875afe81d98a03312889e6"}, + {file = "SQLAlchemy-2.0.32-cp39-cp39-win32.whl", hash = "sha256:0d322cc9c9b2154ba7e82f7bf25ecc7c36fbe2d82e2933b3642fc095a52cfc78"}, + {file = "SQLAlchemy-2.0.32-cp39-cp39-win_amd64.whl", hash = "sha256:7dd8583df2f98dea28b5cd53a1beac963f4f9d087888d75f22fcc93a07cf8d84"}, + {file = "SQLAlchemy-2.0.32-py3-none-any.whl", hash = "sha256:e567a8793a692451f706b363ccf3c45e056b67d90ead58c3bc9471af5d212202"}, + {file = "SQLAlchemy-2.0.32.tar.gz", hash = 
"sha256:c1b88cc8b02b6a5f0efb0345a03672d4c897dc7d92585176f88c67346f565ea8"}, +] + +[package.dependencies] +greenlet = {version = "!=0.4.17", markers = "python_version < \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} +typing-extensions = ">=4.6.0" + +[package.extras] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] +aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx_oracle (>=8)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] +pymysql = ["pymysql"] +sqlcipher = ["sqlcipher3_binary"] + +[[package]] +name = "sqlparse" +version = "0.5.1" +description = "A non-validating SQL parser." +optional = false +python-versions = ">=3.8" +files = [ + {file = "sqlparse-0.5.1-py3-none-any.whl", hash = "sha256:773dcbf9a5ab44a090f3441e2180efe2560220203dc2f8c0b0fa141e18b505e4"}, + {file = "sqlparse-0.5.1.tar.gz", hash = "sha256:bb6b4df465655ef332548e24f08e205afc81b9ab86cb1c45657a7ff173a3a00e"}, +] + +[package.extras] +dev = ["build", "hatch"] +doc = ["sphinx"] + +[[package]] +name = "tenacity" +version = "8.5.0" +description = "Retry code until it succeeds" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tenacity-8.5.0-py3-none-any.whl", hash = "sha256:b594c2a5945830c267ce6b79a166228323ed52718f30302c1359836112346687"}, + {file = "tenacity-8.5.0.tar.gz", hash = "sha256:8bc6c0c8a09b31e6cad13c47afbed1a567518250a9a171418582ed8d9c20ca78"}, +] + +[package.extras] +doc = ["reno", "sphinx"] +test = ["pytest", "tornado (>=4.5)", "typeguard"] + +[[package]] +name = "threadpoolctl" +version = "3.5.0" +description = "threadpoolctl" +optional = false +python-versions = ">=3.8" +files = [ + {file = "threadpoolctl-3.5.0-py3-none-any.whl", hash = "sha256:56c1e26c150397e58c4926da8eeee87533b1e32bef131bd4bf6a2f45f3185467"}, + {file = "threadpoolctl-3.5.0.tar.gz", hash = "sha256:082433502dd922bf738de0d8bcc4fdcbf0979ff44c42bd40f5af8a282f6fa107"}, +] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, 
+ {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[[package]] +name = "tzdata" +version = "2024.1" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, +] + +[[package]] +name = "urllib3" +version = "2.2.2" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, + {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "waitress" +version = "3.0.0" +description = "Waitress WSGI server" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "waitress-3.0.0-py3-none-any.whl", hash = "sha256:2a06f242f4ba0cc563444ca3d1998959447477363a2d7e9b8b4d75d35cfd1669"}, + {file = "waitress-3.0.0.tar.gz", hash = "sha256:005da479b04134cdd9dd602d1ee7c49d79de0537610d653674cc6cbde222b8a1"}, +] + +[package.extras] +docs = ["Sphinx (>=1.8.1)", "docutils", "pylons-sphinx-themes (>=1.0.9)"] +testing = ["coverage (>=5.0)", "pytest", "pytest-cov"] + +[[package]] +name = "werkzeug" +version = "3.0.3" +description = "The comprehensive WSGI web application library." +optional = false +python-versions = ">=3.8" +files = [ + {file = "werkzeug-3.0.3-py3-none-any.whl", hash = "sha256:fc9645dc43e03e4d630d23143a04a7f947a9a3b5727cd535fdfe155a17cc48c8"}, + {file = "werkzeug-3.0.3.tar.gz", hash = "sha256:097e5bfda9f0aba8da6b8545146def481d06aa7d3266e7448e2cccf67dd8bd18"}, +] + +[package.dependencies] +MarkupSafe = ">=2.1.1" + +[package.extras] +watchdog = ["watchdog (>=2.3)"] + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = 
"wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = 
"wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", 
hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + +[[package]] +name = "zipp" +version = "3.20.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.20.0-py3-none-any.whl", hash = "sha256:58da6168be89f0be59beb194da1250516fdaa062ccebd30127ac65d30045e10d"}, + {file = "zipp-3.20.0.tar.gz", hash = "sha256:0145e43d89664cfe1a2e533adc75adafed82fe2da404b4bbb6b026c0157bdb31"}, +] + +[package.extras] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] + +[metadata] +lock-version = "2.0" +python-versions = ">=3.8.1,<3.12" +content-hash = "6da52f0e39bc7da1a80cc181bdd481e57f4644daf2f3c6da6a6b0ead2e813be9" diff --git a/libs/partners/databricks/pyproject.toml b/libs/partners/databricks/pyproject.toml new file mode 100644 index 0000000000000..cdea854df91a8 --- /dev/null +++ b/libs/partners/databricks/pyproject.toml @@ -0,0 +1,99 @@ +[tool.poetry] +name = "langchain-databricks" +version = "0.1.0" +description = "An integration package connecting Databricks and LangChain" +authors = [] +readme = "README.md" +repository = "https://github.com/langchain-ai/langchain" +license = "MIT" + +[tool.poetry.urls] +"Source Code" = "https://github.com/langchain-ai/langchain/tree/master/libs/partners/databricks" +"Release Notes" = "https://github.com/langchain-ai/langchain/releases?q=tag%3A%22databricks%3D%3D0%22&expanded=true" + +[tool.poetry.dependencies] +# TODO: Replace <3.12 to <4.0 once https://github.com/mlflow/mlflow/commit/04370119fcc1b2ccdbcd9a50198ab00566d58cd2 is released +python = ">=3.8.1,<3.12" +langchain-core = "^0.2.0" +mlflow = ">=2.9" + +# MLflow depends on following libraries, which require different version for Python 3.8 vs 3.12 +numpy = [ + {version = ">=1.26.0", python = ">=3.12"}, + {version = ">=1.24.0", python = "<3.12"}, +] +scipy = [ + {version = ">=1.11", python = ">=3.12"}, + {version = "<2", python = "<3.12"} +] + +[tool.poetry.group.test] +optional = true + +[tool.poetry.group.test.dependencies] +pytest = "^7.4.3" +pytest-asyncio = "^0.23.2" +pytest-socket = "^0.7.0" +langchain-core = { path = "../../core", develop = true } + +[tool.poetry.group.codespell] +optional = true + +[tool.poetry.group.codespell.dependencies] +codespell = "^2.2.6" + +[tool.poetry.group.test_integration] +optional = true + +[tool.poetry.group.test_integration.dependencies] + +[tool.poetry.group.lint] +optional = true + +[tool.poetry.group.lint.dependencies] +ruff = "^0.5" + +[tool.poetry.group.typing.dependencies] +mypy = "^1.10" +langchain-core = { path = "../../core", develop = true } + +[tool.poetry.group.dev] +optional = true + +[tool.poetry.group.dev.dependencies] +langchain-core = { path = "../../core", develop = true } + +[tool.ruff.lint] +select = [ + "E", # pycodestyle + "F", # pyflakes + "I", # isort + "T201", # print +] + +[tool.mypy] +disallow_untyped_defs = "True" + +[tool.coverage.run] +omit 
= ["tests/*"] + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.pytest.ini_options] +# --strict-markers will raise errors on unknown marks. +# https://docs.pytest.org/en/7.1.x/how-to/mark.html#raising-errors-on-unknown-marks +# +# https://docs.pytest.org/en/7.1.x/reference/reference.html +# --strict-config any warnings encountered while parsing the `pytest` +# section of the configuration file raise errors. +# +# https://github.com/tophat/syrupy +addopts = "--strict-markers --strict-config --durations=5" +# Registering custom markers. +# https://docs.pytest.org/en/7.1.x/example/markers.html#registering-markers +markers = [ + "compile: mark placeholder test used to compile integration tests without running them", +] +asyncio_mode = "auto" diff --git a/libs/partners/databricks/scripts/check_imports.py b/libs/partners/databricks/scripts/check_imports.py new file mode 100644 index 0000000000000..58a460c149353 --- /dev/null +++ b/libs/partners/databricks/scripts/check_imports.py @@ -0,0 +1,17 @@ +import sys +import traceback +from importlib.machinery import SourceFileLoader + +if __name__ == "__main__": + files = sys.argv[1:] + has_failure = False + for file in files: + try: + SourceFileLoader("x", file).load_module() + except Exception: + has_failure = True + print(file) # noqa: T201 + traceback.print_exc() + print() # noqa: T201 + + sys.exit(1 if has_failure else 0) diff --git a/libs/partners/databricks/scripts/check_pydantic.sh b/libs/partners/databricks/scripts/check_pydantic.sh new file mode 100755 index 0000000000000..06b5bb81ae236 --- /dev/null +++ b/libs/partners/databricks/scripts/check_pydantic.sh @@ -0,0 +1,27 @@ +#!/bin/bash +# +# This script searches for lines starting with "import pydantic" or "from pydantic" +# in tracked files within a Git repository. +# +# Usage: ./scripts/check_pydantic.sh /path/to/repository + +# Check if a path argument is provided +if [ $# -ne 1 ]; then + echo "Usage: $0 /path/to/repository" + exit 1 +fi + +repository_path="$1" + +# Search for lines matching the pattern within the specified repository +result=$(git -C "$repository_path" grep -E '^import pydantic|^from pydantic') + +# Check if any matching lines were found +if [ -n "$result" ]; then + echo "ERROR: The following lines need to be updated:" + echo "$result" + echo "Please replace the code with an import from langchain_core.pydantic_v1." + echo "For example, replace 'from pydantic import BaseModel'" + echo "with 'from langchain_core.pydantic_v1 import BaseModel'" + exit 1 +fi diff --git a/libs/partners/databricks/scripts/lint_imports.sh b/libs/partners/databricks/scripts/lint_imports.sh new file mode 100755 index 0000000000000..19ccec1480c01 --- /dev/null +++ b/libs/partners/databricks/scripts/lint_imports.sh @@ -0,0 +1,18 @@ +#!/bin/bash + +set -eu + +# Initialize a variable to keep track of errors +errors=0 + +# make sure not importing from langchain, langchain_experimental, or langchain_community +git --no-pager grep '^from langchain\.' . && errors=$((errors+1)) +git --no-pager grep '^from langchain_experimental\.' . && errors=$((errors+1)) +git --no-pager grep '^from langchain_community\.' . 
&& errors=$((errors+1)) + +# Decide on an exit status based on the errors +if [ "$errors" -gt 0 ]; then + exit 1 +else + exit 0 +fi diff --git a/libs/partners/databricks/tests/__init__.py b/libs/partners/databricks/tests/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/libs/partners/databricks/tests/integration_tests/__init__.py b/libs/partners/databricks/tests/integration_tests/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/libs/partners/databricks/tests/integration_tests/test_compile.py b/libs/partners/databricks/tests/integration_tests/test_compile.py new file mode 100644 index 0000000000000..33ecccdfa0fbd --- /dev/null +++ b/libs/partners/databricks/tests/integration_tests/test_compile.py @@ -0,0 +1,7 @@ +import pytest + + +@pytest.mark.compile +def test_placeholder() -> None: + """Used for compiling integration tests without running any real tests.""" + pass diff --git a/libs/partners/databricks/tests/unit_tests/__init__.py b/libs/partners/databricks/tests/unit_tests/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/libs/partners/databricks/tests/unit_tests/test_chat_models.py b/libs/partners/databricks/tests/unit_tests/test_chat_models.py new file mode 100644 index 0000000000000..118d3022fcd01 --- /dev/null +++ b/libs/partners/databricks/tests/unit_tests/test_chat_models.py @@ -0,0 +1,321 @@ +"""Test chat model integration.""" + +import json +from typing import Generator +from unittest import mock + +import mlflow # type: ignore # noqa: F401 +import pytest +from langchain_core.messages import ( + AIMessage, + AIMessageChunk, + BaseMessage, + ChatMessage, + ChatMessageChunk, + FunctionMessage, + HumanMessage, + HumanMessageChunk, + SystemMessage, + SystemMessageChunk, + ToolMessageChunk, +) +from langchain_core.messages.tool import ToolCallChunk +from langchain_core.pydantic_v1 import BaseModel, Field + +from langchain_databricks.chat_models import ( + ChatDatabricks, + _convert_dict_to_message, + _convert_dict_to_message_chunk, + _convert_message_to_dict, +) + +_MOCK_CHAT_RESPONSE = { + "id": "chatcmpl_id", + "object": "chat.completion", + "created": 1721875529, + "model": "meta-llama-3.1-70b-instruct-072424", + "choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": "To calculate the result of 36939 multiplied by 8922.4, " + "I get:\n\n36939 x 8922.4 = 329,511,111.6", + }, + "finish_reason": "stop", + "logprobs": None, + } + ], + "usage": {"prompt_tokens": 30, "completion_tokens": 36, "total_tokens": 66}, +} + +_MOCK_STREAM_RESPONSE = [ + { + "id": "chatcmpl_bb1fce87-f14e-4ae1-ac22-89facc74898a", + "object": "chat.completion.chunk", + "created": 1721877054, + "model": "meta-llama-3.1-70b-instruct-072424", + "choices": [ + { + "index": 0, + "delta": {"role": "assistant", "content": "36939"}, + "finish_reason": None, + "logprobs": None, + } + ], + "usage": {"prompt_tokens": 30, "completion_tokens": 20, "total_tokens": 50}, + }, + { + "id": "chatcmpl_bb1fce87-f14e-4ae1-ac22-89facc74898a", + "object": "chat.completion.chunk", + "created": 1721877054, + "model": "meta-llama-3.1-70b-instruct-072424", + "choices": [ + { + "index": 0, + "delta": {"role": "assistant", "content": "x"}, + "finish_reason": None, + "logprobs": None, + } + ], + "usage": {"prompt_tokens": 30, "completion_tokens": 22, "total_tokens": 52}, + }, + { + "id": "chatcmpl_bb1fce87-f14e-4ae1-ac22-89facc74898a", + "object": "chat.completion.chunk", + "created": 1721877054, + "model": 
"meta-llama-3.1-70b-instruct-072424", + "choices": [ + { + "index": 0, + "delta": {"role": "assistant", "content": "8922.4"}, + "finish_reason": None, + "logprobs": None, + } + ], + "usage": {"prompt_tokens": 30, "completion_tokens": 24, "total_tokens": 54}, + }, + { + "id": "chatcmpl_bb1fce87-f14e-4ae1-ac22-89facc74898a", + "object": "chat.completion.chunk", + "created": 1721877054, + "model": "meta-llama-3.1-70b-instruct-072424", + "choices": [ + { + "index": 0, + "delta": {"role": "assistant", "content": " = "}, + "finish_reason": None, + "logprobs": None, + } + ], + "usage": {"prompt_tokens": 30, "completion_tokens": 28, "total_tokens": 58}, + }, + { + "id": "chatcmpl_bb1fce87-f14e-4ae1-ac22-89facc74898a", + "object": "chat.completion.chunk", + "created": 1721877054, + "model": "meta-llama-3.1-70b-instruct-072424", + "choices": [ + { + "index": 0, + "delta": {"role": "assistant", "content": "329,511,111.6"}, + "finish_reason": None, + "logprobs": None, + } + ], + "usage": {"prompt_tokens": 30, "completion_tokens": 30, "total_tokens": 60}, + }, + { + "id": "chatcmpl_bb1fce87-f14e-4ae1-ac22-89facc74898a", + "object": "chat.completion.chunk", + "created": 1721877054, + "model": "meta-llama-3.1-70b-instruct-072424", + "choices": [ + { + "index": 0, + "delta": {"role": "assistant", "content": ""}, + "finish_reason": "stop", + "logprobs": None, + } + ], + "usage": {"prompt_tokens": 30, "completion_tokens": 36, "total_tokens": 66}, + }, +] + + +@pytest.fixture(autouse=True) +def mock_client() -> Generator: + client = mock.MagicMock() + client.predict.return_value = _MOCK_CHAT_RESPONSE + client.predict_stream.return_value = _MOCK_STREAM_RESPONSE + with mock.patch("mlflow.deployments.get_deploy_client", return_value=client): + yield + + +@pytest.fixture +def llm() -> ChatDatabricks: + return ChatDatabricks( + endpoint="databricks-meta-llama-3-70b-instruct", target_uri="databricks" + ) + + +def test_chat_mlflow_predict(llm: ChatDatabricks) -> None: + res = llm.invoke( + [ + {"role": "system", "content": "You are a helpful assistant."}, + {"role": "user", "content": "36939 * 8922.4"}, + ] + ) + assert res.content == _MOCK_CHAT_RESPONSE["choices"][0]["message"]["content"] # type: ignore[index] + + +def test_chat_mlflow_stream(llm: ChatDatabricks) -> None: + res = llm.stream( + [ + {"role": "system", "content": "You are a helpful assistant."}, + {"role": "user", "content": "36939 * 8922.4"}, + ] + ) + for chunk, expected in zip(res, _MOCK_STREAM_RESPONSE): + assert chunk.content == expected["choices"][0]["delta"]["content"] # type: ignore[index] + + +def test_chat_mlflow_bind_tools(llm: ChatDatabricks) -> None: + class GetWeather(BaseModel): + """Get the current weather in a given location""" + + location: str = Field( + ..., description="The city and state, e.g. San Francisco, CA" + ) + + class GetPopulation(BaseModel): + """Get the current population in a given location""" + + location: str = Field( + ..., description="The city and state, e.g. San Francisco, CA" + ) + + llm_with_tools = llm.bind_tools([GetWeather, GetPopulation]) + response = llm_with_tools.invoke( + "Which city is hotter today and which is bigger: LA or NY?" 
+ ) + assert isinstance(response, AIMessage) + + +### Test data conversion functions ### + + +@pytest.mark.parametrize( + ("role", "expected_output"), + [ + ("user", HumanMessage("foo")), + ("system", SystemMessage("foo")), + ("assistant", AIMessage("foo")), + ("any_role", ChatMessage(content="foo", role="any_role")), + ], +) +def test_convert_message(role: str, expected_output: BaseMessage) -> None: + message = {"role": role, "content": "foo"} + result = _convert_dict_to_message(message) + assert result == expected_output + + # convert back + dict_result = _convert_message_to_dict(result) + assert dict_result == message + + +def test_convert_message_with_tool_calls() -> None: + ID = "call_fb5f5e1a-bac0-4422-95e9-d06e6022ad12" + tool_calls = [ + { + "id": ID, + "type": "function", + "function": { + "name": "main__test__python_exec", + "arguments": '{"code": "result = 36939 * 8922.4"}', + }, + } + ] + message_with_tools = { + "role": "assistant", + "content": None, + "tool_calls": tool_calls, + "id": ID, + } + result = _convert_dict_to_message(message_with_tools) + expected_output = AIMessage( + content="", + additional_kwargs={"tool_calls": tool_calls}, + id=ID, + tool_calls=[ + { + "name": tool_calls[0]["function"]["name"], # type: ignore[index] + "args": json.loads(tool_calls[0]["function"]["arguments"]), # type: ignore[index] + "id": ID, + "type": "tool_call", + } + ], + ) + assert result == expected_output + + # convert back + dict_result = _convert_message_to_dict(result) + assert dict_result == message_with_tools + + +@pytest.mark.parametrize( + ("role", "expected_output"), + [ + ("user", HumanMessageChunk(content="foo")), + ("system", SystemMessageChunk(content="foo")), + ("assistant", AIMessageChunk(content="foo")), + ("any_role", ChatMessageChunk(content="foo", role="any_role")), + ], +) +def test_convert_message_chunk(role: str, expected_output: BaseMessage) -> None: + delta = {"role": role, "content": "foo"} + result = _convert_dict_to_message_chunk(delta, "default_role") + assert result == expected_output + + # convert back + dict_result = _convert_message_to_dict(result) + assert dict_result == delta + + +def test_convert_message_chunk_with_tool_calls() -> None: + delta_with_tools = { + "role": "assistant", + "content": None, + "tool_calls": [{"index": 0, "function": {"arguments": " }"}}], + } + result = _convert_dict_to_message_chunk(delta_with_tools, "role") + expected_output = AIMessageChunk( + content="", + additional_kwargs={"tool_calls": delta_with_tools["tool_calls"]}, + id=None, + tool_call_chunks=[ToolCallChunk(name=None, args=" }", id=None, index=0)], + ) + assert result == expected_output + + +def test_convert_tool_message_chunk() -> None: + delta = { + "role": "tool", + "content": "foo", + "tool_call_id": "tool_call_id", + "id": "some_id", + } + result = _convert_dict_to_message_chunk(delta, "default_role") + expected_output = ToolMessageChunk( + content="foo", id="some_id", tool_call_id="tool_call_id" + ) + assert result == expected_output + + # convert back + dict_result = _convert_message_to_dict(result) + assert dict_result == delta + + +def test_convert_message_to_dict_function() -> None: + with pytest.raises(ValueError, match="Function messages are not supported"): + _convert_message_to_dict(FunctionMessage(content="", name="name")) diff --git a/libs/partners/databricks/tests/unit_tests/test_imports.py b/libs/partners/databricks/tests/unit_tests/test_imports.py new file mode 100644 index 0000000000000..579123a8bbb06 --- /dev/null +++ 
b/libs/partners/databricks/tests/unit_tests/test_imports.py @@ -0,0 +1,10 @@ +from langchain_databricks import __all__ + +EXPECTED_ALL = [ + "ChatDatabricks", + "__version__", +] + + +def test_all_imports() -> None: + assert sorted(EXPECTED_ALL) == sorted(__all__) From e958f76160c6542072c7bd334fbb3c21d327e8bc Mon Sep 17 00:00:00 2001 From: Erick Friis Date: Wed, 21 Aug 2024 21:24:34 -0700 Subject: [PATCH 54/80] docs: migration guide nits (#25600) --- .../constitutional_chain.ipynb | 12 +-- .../migrating_chains/conversation_chain.ipynb | 14 +-- .../conversation_retrieval_chain.ipynb | 23 +++-- .../versions/migrating_chains/index.ipynb | 89 +++++++++++++++++++ docs/docs/versions/migrating_chains/index.mdx | 51 ----------- .../versions/migrating_chains/llm_chain.ipynb | 59 ++++++++---- .../migrating_chains/llm_math_chain.ipynb | 12 +-- .../migrating_chains/llm_router_chain.ipynb | 12 +-- .../migrating_chains/map_reduce_chain.ipynb | 20 ++--- .../map_rerank_docs_chain.ipynb | 6 +- .../migrating_chains/multi_prompt_chain.ipynb | 14 +-- .../migrating_chains/refine_docs_chain.ipynb | 8 +- .../migrating_chains/retrieval_qa.ipynb | 24 +++-- .../migrating_chains/stuff_docs_chain.ipynb | 8 +- 14 files changed, 175 insertions(+), 177 deletions(-) create mode 100644 docs/docs/versions/migrating_chains/index.ipynb delete mode 100644 docs/docs/versions/migrating_chains/index.mdx diff --git a/docs/docs/versions/migrating_chains/constitutional_chain.ipynb b/docs/docs/versions/migrating_chains/constitutional_chain.ipynb index 771145547cdf7..e5018aab6f430 100644 --- a/docs/docs/versions/migrating_chains/constitutional_chain.ipynb +++ b/docs/docs/versions/migrating_chains/constitutional_chain.ipynb @@ -1,20 +1,12 @@ { "cells": [ - { - "cell_type": "markdown", - "id": "b57124cc-60a0-4c18-b7ce-3e483d1024a2", - "metadata": {}, - "source": [ - "---\n", - "title: Migrating from ConstitutionalChain\n", - "---" - ] - }, { "cell_type": "markdown", "id": "ce8457ed-c0b1-4a74-abbd-9d3d2211270f", "metadata": {}, "source": [ + "# Migrating from ConstitutionalChain\n", + "\n", "[ConstitutionalChain](https://api.python.langchain.com/en/latest/chains/langchain.chains.constitutional_ai.base.ConstitutionalChain.html) allowed for a LLM to critique and revise generations based on [principles](https://api.python.langchain.com/en/latest/chains/langchain.chains.constitutional_ai.models.ConstitutionalPrinciple.html), structured as combinations of critique and revision requests. 
For example, a principle might include a request to identify harmful content, and a request to rewrite the content.\n", "\n", "`Constitutional AI principles` are based on the [Constitutional AI: Harmlessness from AI Feedback](https://arxiv.org/pdf/2212.08073) paper.\n", diff --git a/docs/docs/versions/migrating_chains/conversation_chain.ipynb b/docs/docs/versions/migrating_chains/conversation_chain.ipynb index ba56524614a76..e2d692daa3ea1 100644 --- a/docs/docs/versions/migrating_chains/conversation_chain.ipynb +++ b/docs/docs/versions/migrating_chains/conversation_chain.ipynb @@ -1,21 +1,13 @@ { "cells": [ - { - "cell_type": "markdown", - "id": "030d95bc-2f9d-492b-8245-b791b866936b", - "metadata": {}, - "source": [ - "---\n", - "title: Migrating from ConversationalChain\n", - "---" - ] - }, { "cell_type": "markdown", "id": "d20aeaad-b3ca-4a7d-b02d-3267503965af", "metadata": {}, "source": [ - "[`ConversationChain`](https://api.python.langchain.com/en/latest/chains/langchain.chains.conversation.base.ConversationChain.html) incorporates a memory of previous messages to sustain a stateful conversation.\n", + "# Migrating from ConversationalChain\n", + "\n", + "[`ConversationChain`](https://api.python.langchain.com/en/latest/chains/langchain.chains.conversation.base.ConversationChain.html) incorporated a memory of previous messages to sustain a stateful conversation.\n", "\n", "Some advantages of switching to the LCEL implementation are:\n", "\n", diff --git a/docs/docs/versions/migrating_chains/conversation_retrieval_chain.ipynb b/docs/docs/versions/migrating_chains/conversation_retrieval_chain.ipynb index 841eca7c44c85..85d4a3ac0e314 100644 --- a/docs/docs/versions/migrating_chains/conversation_retrieval_chain.ipynb +++ b/docs/docs/versions/migrating_chains/conversation_retrieval_chain.ipynb @@ -1,30 +1,29 @@ { "cells": [ - { - "cell_type": "markdown", - "id": "9e279999-6bf0-4a48-9e06-539b916dc705", - "metadata": {}, - "source": [ - "---\n", - "title: Migrating from ConversationalRetrievalChain\n", - "---" - ] - }, { "cell_type": "markdown", "id": "292a3c83-44a9-4426-bbec-f1a778d00d93", "metadata": {}, "source": [ + "# Migrating from ConversationalRetrievalChain\n", + "\n", "The [`ConversationalRetrievalChain`](https://api.python.langchain.com/en/latest/chains/langchain.chains.conversational_retrieval.base.ConversationalRetrievalChain.html) was an all-in one way that combined retrieval-augmented generation with chat history, allowing you to \"chat with\" your documents.\n", "\n", - "Advantages of switching to the LCEL implementation are similar to the `RetrievalQA` section above:\n", + "Advantages of switching to the LCEL implementation are similar to the [`RetrievalQA` migration guide](./retrieval_qa.ipynb):\n", "\n", "- Clearer internals. The `ConversationalRetrievalChain` chain hides an entire question rephrasing step which dereferences the initial query against the chat history.\n", " - This means the class contains two sets of configurable prompts, LLMs, etc.\n", "- More easily return source documents.\n", "- Support for runnable methods like streaming and async operations.\n", "\n", - "Here are side-by-side implementations with custom prompts. 
We'll reuse the loaded documents and vector store from the previous section:" + "Here are equivalent implementations with custom prompts.\n", + "We'll use the following ingestion code to load a [blog post by Lilian Weng](https://lilianweng.github.io/posts/2023-06-23-agent/) on autonomous agents into a local vector store:\n", + "\n", + "## Shared setup\n", + "\n", + "For both versions, we'll need to load the data with the `WebBaseLoader` document loader, split it with `RecursiveCharacterTextSplitter`, and add it to an in-memory `FAISS` vector store.\n", + "\n", + "We will also instantiate a chat model to use." ] }, { diff --git a/docs/docs/versions/migrating_chains/index.ipynb b/docs/docs/versions/migrating_chains/index.ipynb new file mode 100644 index 0000000000000..9d9ddbba0cf0f --- /dev/null +++ b/docs/docs/versions/migrating_chains/index.ipynb @@ -0,0 +1,89 @@ +{ + "cells": [ + { + "cell_type": "raw", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "---\n", + "sidebar_position: 1\n", + "---" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# How to migrate from v0.0 chains\n", + "\n", + "LangChain has evolved since its initial release, and many of the original \"Chain\" classes \n", + "have been deprecated in favor of the more flexible and powerful frameworks of LCEL and LangGraph. \n", + "\n", + "This guide will help you migrate your existing v0.0 chains to the new abstractions.\n", + "\n", + ":::info How deprecated implementations work\n", + "Even though many of these implementations are deprecated, they are **still supported** in the codebase. \n", + "However, they are not recommended for new development, and we recommend re-implementing them using the following guides!\n", + "\n", + "To see the planned removal version for each deprecated implementation, check their API reference.\n", + ":::\n", + "\n", + ":::info Prerequisites\n", + "\n", + "These guides assume some familiarity with the following concepts:\n", + "- [LangChain Expression Language](/docs/concepts#langchain-expression-language-lcel)\n", + "- [LangGraph](https://langchain-ai.github.io/langgraph/)\n", + ":::\n", + "\n", + "LangChain maintains a number of legacy abstractions. Many of these can be reimplemented via short combinations of LCEL and LangGraph primitives.\n", + "\n", + "### LCEL\n", + "[LCEL](/docs/concepts/#langchain-expression-language-lcel) is designed to streamline the process of building useful apps with LLMs and combining related components. It does this by providing:\n", + "\n", + "1. **A unified interface**: Every LCEL object implements the `Runnable` interface, which defines a common set of invocation methods (`invoke`, `batch`, `stream`, `ainvoke`, ...). This makes it possible to also automatically and consistently support useful operations like streaming of intermediate steps and batching, since every chain composed of LCEL objects is itself an LCEL object.\n", + "2. **Composition primitives**: LCEL provides a number of primitives that make it easy to compose chains, parallelize components, add fallbacks, dynamically configure chain internals, and more.\n", + "\n", + "### LangGraph\n", + "[LangGraph](https://langchain-ai.github.io/langgraph/), built on top of LCEL, allows for performant orchestrations of application components while maintaining concise and readable code. 
It includes built-in persistence, support for cycles, and prioritizes controllability.\n", + "If LCEL grows unwieldy for larger or more complex chains, they may benefit from a LangGraph implementation.\n", + "\n", + "### Advantages\n", + "Using these frameworks for existing v0.0 chains confers some advantages:\n", + "\n", + "- The resulting chains typically implement the full `Runnable` interface, including streaming and asynchronous support where appropriate;\n", + "- The chains may be more easily extended or modified;\n", + "- The parameters of the chain are typically surfaced for easier customization (e.g., prompts) over previous versions, which tended to be subclasses and had opaque parameters and internals.\n", + "- If using LangGraph, the chain supports built-in persistence, allowing for conversational experiences via a \"memory\" of the chat history.\n", + "- If using LangGraph, the steps of the chain can be streamed, allowing for greater control and customizability.\n", + "\n", + "\n", + "The below pages assist with migration from various specific chains to LCEL and LangGraph:\n", + "\n", + "- [LLMChain](./llm_chain.ipynb)\n", + "- [ConversationChain](./conversation_chain.ipynb)\n", + "- [RetrievalQA](./retrieval_qa.ipynb)\n", + "- [ConversationalRetrievalChain](./conversation_retrieval_chain.ipynb)\n", + "- [StuffDocumentsChain](./stuff_docs_chain.ipynb)\n", + "- [MapReduceDocumentsChain](./map_reduce_chain.ipynb)\n", + "- [MapRerankDocumentsChain](./map_rerank_docs_chain.ipynb)\n", + "- [RefineDocumentsChain](./refine_docs_chain.ipynb)\n", + "- [LLMRouterChain](./llm_router_chain.ipynb)\n", + "- [MultiPromptChain](./multi_prompt_chain.ipynb)\n", + "- [LLMMathChain](./llm_math_chain.ipynb)\n", + "- [ConstitutionalChain](./constitutional_chain.ipynb)\n", + "\n", + "Check out the [LCEL conceptual docs](/docs/concepts/#langchain-expression-language-lcel) and [LangGraph docs](https://langchain-ai.github.io/langgraph/) for more background information." + ] + } + ], + "metadata": { + "language_info": { + "name": "python" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/docs/docs/versions/migrating_chains/index.mdx b/docs/docs/versions/migrating_chains/index.mdx deleted file mode 100644 index 69f6e24c8ef27..0000000000000 --- a/docs/docs/versions/migrating_chains/index.mdx +++ /dev/null @@ -1,51 +0,0 @@ ---- -sidebar_position: 1 ---- - -# How to migrate from v0.0 chains - -:::info Prerequisites - -This guide assumes familiarity with the following concepts: -- [LangChain Expression Language](/docs/concepts#langchain-expression-language-lcel) -- [LangGraph](https://langchain-ai.github.io/langgraph/) -::: - -LangChain maintains a number of legacy abstractions. Many of these can be reimplemented via short combinations of LCEL and LangGraph primitives. - -### LCEL -[LCEL](/docs/concepts/#langchain-expression-language-lcel) is designed to streamline the process of building useful apps with LLMs and combining related components. It does this by providing: - -1. **A unified interface**: Every LCEL object implements the `Runnable` interface, which defines a common set of invocation methods (`invoke`, `batch`, `stream`, `ainvoke`, ...). This makes it possible to also automatically and consistently support useful operations like streaming of intermediate steps and batching, since every chain composed of LCEL objects is itself an LCEL object. -2. 
**Composition primitives**: LCEL provides a number of primitives that make it easy to compose chains, parallelize components, add fallbacks, dynamically configure chain internals, and more. - -### LangGraph -[LangGraph](https://langchain-ai.github.io/langgraph/), built on top of LCEL, allows for performant orchestrations of application components while maintaining concise and readable code. It includes built-in persistence, support for cycles, and prioritizes controllability. -If LCEL grows unwieldy for larger or more complex chains, they may benefit from a LangGraph implementation. - -### Advantages -Using these frameworks for existing v0.0 chains confers some advantages: - -- The resulting chains typically implement the full `Runnable` interface, including streaming and asynchronous support where appropriate; -- The chains may be more easily extended or modified; -- The parameters of the chain are typically surfaced for easier customization (e.g., prompts) over previous versions, which tended to be subclasses and had opaque parameters and internals. -- If using LangGraph, the chain supports built-in persistence, allowing for conversational experiences via a "memory" of the chat history. -- If using LangGraph, the steps of the chain can be streamed, allowing for greater control and customizability. - - -The below pages assist with migration from various specific chains to LCEL and LangGraph: - -- [LLMChain](/docs/versions/migrating_chains/llm_chain) -- [ConversationChain](/docs/versions/migrating_chains/conversation_chain) -- [RetrievalQA](/docs/versions/migrating_chains/retrieval_qa) -- [ConversationalRetrievalChain](/docs/versions/migrating_chains/conversation_retrieval_chain) -- [StuffDocumentsChain](/docs/versions/migrating_chains/stuff_docs_chain) -- [MapReduceDocumentsChain](/docs/versions/migrating_chains/map_reduce_chain) -- [MapRerankDocumentsChain](/docs/versions/migrating_chains/map_rerank_docs_chain) -- [RefineDocumentsChain](/docs/versions/migrating_chains/refine_docs_chain) -- [LLMRouterChain](/docs/versions/migrating_chains/llm_router_chain) -- [MultiPromptChain](/docs/versions/migrating_chains/multi_prompt_chain) -- [LLMMathChain](/docs/versions/migrating_chains/llm_math_chain) -- [ConstitutionalChain](/docs/versions/migrating_chains/constitutional_chain) - -Check out the [LCEL conceptual docs](/docs/concepts/#langchain-expression-language-lcel) and [LangGraph docs](https://langchain-ai.github.io/langgraph/) for more background information. 
\ No newline at end of file diff --git a/docs/docs/versions/migrating_chains/llm_chain.ipynb b/docs/docs/versions/migrating_chains/llm_chain.ipynb index c288cadf0d1de..dbbb39fd4222d 100644 --- a/docs/docs/versions/migrating_chains/llm_chain.ipynb +++ b/docs/docs/versions/migrating_chains/llm_chain.ipynb @@ -1,20 +1,12 @@ { "cells": [ - { - "cell_type": "markdown", - "id": "b57124cc-60a0-4c18-b7ce-3e483d1024a2", - "metadata": {}, - "source": [ - "---\n", - "title: Migrating from LLMChain\n", - "---" - ] - }, { "cell_type": "markdown", "id": "ce8457ed-c0b1-4a74-abbd-9d3d2211270f", "metadata": {}, "source": [ + "# Migrating from LLMChain\n", + "\n", "[`LLMChain`](https://api.python.langchain.com/en/latest/chains/langchain.chains.llm.LLMChain.html) combined a prompt template, LLM, and output parser into a class.\n", "\n", "Some advantages of switching to the LCEL implementation are:\n", @@ -36,7 +28,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 1, "id": "717c8673", "metadata": {}, "outputs": [], @@ -44,7 +36,8 @@ "import os\n", "from getpass import getpass\n", "\n", - "os.environ[\"OPENAI_API_KEY\"] = getpass()" + "if \"OPENAI_API_KEY\" not in os.environ:\n", + " os.environ[\"OPENAI_API_KEY\"] = getpass()" ] }, { @@ -59,7 +52,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 5, "id": "f91c9809-8ee7-4e38-881d-0ace4f6ea883", "metadata": {}, "outputs": [ @@ -70,7 +63,7 @@ " 'text': \"Why couldn't the bicycle stand up by itself?\\n\\nBecause it was two tired!\"}" ] }, - "execution_count": 2, + "execution_count": 5, "metadata": {}, "output_type": "execute_result" } @@ -84,9 +77,39 @@ " [(\"user\", \"Tell me a {adjective} joke\")],\n", ")\n", "\n", - "chain = LLMChain(llm=ChatOpenAI(), prompt=prompt)\n", + "legacy_chain = LLMChain(llm=ChatOpenAI(), prompt=prompt)\n", "\n", - "chain({\"adjective\": \"funny\"})" + "legacy_result = legacy_chain({\"adjective\": \"funny\"})\n", + "legacy_result" + ] + }, + { + "cell_type": "markdown", + "id": "9f89e97b", + "metadata": {}, + "source": [ + "Note that `LLMChain` by default returned a `dict` containing both the input and the output from `StrOutputParser`, so to extract the output, you need to access the `\"text\"` key." + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "c7fa1618", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"Why couldn't the bicycle stand up by itself?\\n\\nBecause it was two tired!\"" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "legacy_result[\"text\"]" ] }, { @@ -137,7 +160,7 @@ "id": "3c0b0513-77b8-4371-a20e-3e487cec7e7f", "metadata": {}, "source": [ - "Note that `LLMChain` by default returns a `dict` containing both the input and the output. 
If this behavior is desired, we can replicate it using another LCEL primitive, [`RunnablePassthrough`](https://api.python.langchain.com/en/latest/runnables/langchain_core.runnables.passthrough.RunnablePassthrough.html):" + "If you'd like to mimic the `dict` packaging of input and output in `LLMChain`, you can use a [`RunnablePassthrough.assign`](https://api.python.langchain.com/en/latest/runnables/langchain_core.runnables.passthrough.RunnablePassthrough.html) like:" ] }, { @@ -197,7 +220,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.4" + "version": "3.11.4" } }, "nbformat": 4, diff --git a/docs/docs/versions/migrating_chains/llm_math_chain.ipynb b/docs/docs/versions/migrating_chains/llm_math_chain.ipynb index 87f2511085e7c..0c1bcefd98b68 100644 --- a/docs/docs/versions/migrating_chains/llm_math_chain.ipynb +++ b/docs/docs/versions/migrating_chains/llm_math_chain.ipynb @@ -1,20 +1,12 @@ { "cells": [ - { - "cell_type": "markdown", - "id": "b57124cc-60a0-4c18-b7ce-3e483d1024a2", - "metadata": {}, - "source": [ - "---\n", - "title: Migrating from LLMMathChain\n", - "---" - ] - }, { "cell_type": "markdown", "id": "ce8457ed-c0b1-4a74-abbd-9d3d2211270f", "metadata": {}, "source": [ + "# Migrating from LLMMathChain\n", + "\n", "[`LLMMathChain`](https://api.python.langchain.com/en/latest/chains/langchain.chains.llm_math.base.LLMMathChain.html) enabled the evaluation of mathematical expressions generated by a LLM. Instructions for generating the expressions were formatted into the prompt, and the expressions were parsed out of the string response before evaluation using the [numexpr](https://numexpr.readthedocs.io/en/latest/user_guide.html) library.\n", "\n", "This is more naturally achieved via [tool calling](/docs/concepts/#functiontool-calling). We can equip a chat model with a simple calculator tool leveraging `numexpr` and construct a simple chain around it using [LangGraph](https://langchain-ai.github.io/langgraph/). Some advantages of this approach include:\n", diff --git a/docs/docs/versions/migrating_chains/llm_router_chain.ipynb b/docs/docs/versions/migrating_chains/llm_router_chain.ipynb index 64561caa9651d..8247e1dbd7821 100644 --- a/docs/docs/versions/migrating_chains/llm_router_chain.ipynb +++ b/docs/docs/versions/migrating_chains/llm_router_chain.ipynb @@ -1,20 +1,12 @@ { "cells": [ - { - "cell_type": "markdown", - "id": "575befea-4d98-4941-8e55-1581b169a674", - "metadata": {}, - "source": [ - "---\n", - "title: Migrating from LLMRouterChain\n", - "---" - ] - }, { "cell_type": "markdown", "id": "14625d35-efca-41cf-b203-be9f4c375700", "metadata": {}, "source": [ + "# Migrating from LLMRouterChain\n", + "\n", "The [`LLMRouterChain`](https://api.python.langchain.com/en/latest/chains/langchain.chains.router.llm_router.LLMRouterChain.html) routed an input query to one of multiple destinations-- that is, given an input query, it used a LLM to select from a list of destination chains, and passed its inputs to the selected chain.\n", "\n", "`LLMRouterChain` does not support common [chat model](/docs/concepts/#chat-models) features, such as message roles and [tool calling](/docs/concepts/#functiontool-calling). 
Under the hood, `LLMRouterChain` routes a query by instructing the LLM to generate JSON-formatted text, and parsing out the intended destination.\n", diff --git a/docs/docs/versions/migrating_chains/map_reduce_chain.ipynb b/docs/docs/versions/migrating_chains/map_reduce_chain.ipynb index 0520e67f8d7f5..845c2eb3137a5 100644 --- a/docs/docs/versions/migrating_chains/map_reduce_chain.ipynb +++ b/docs/docs/versions/migrating_chains/map_reduce_chain.ipynb @@ -1,20 +1,12 @@ { "cells": [ - { - "cell_type": "markdown", - "id": "3270b34b-8958-425c-886a-ea4b9e26b475", - "metadata": {}, - "source": [ - "---\n", - "title: Migrating from MapReduceDocumentsChain\n", - "---" - ] - }, { "cell_type": "markdown", "id": "2c7bdc91-9b89-4e59-bc27-89508b024635", "metadata": {}, "source": [ + "# Migrating from MapReduceDocumentsChain\n", + "\n", "[MapReduceDocumentsChain](https://api.python.langchain.com/en/latest/chains/langchain.chains.combine_documents.map_reduce.MapReduceDocumentsChain.html) implements a map-reduce strategy over (potentially long) texts. The strategy is as follows:\n", "\n", "- Split a text into smaller documents;\n", @@ -37,11 +29,9 @@ "\n", "Let's first load a chat model:\n", "\n", - "```{=mdx}\n", "import ChatModelTabs from \"@theme/ChatModelTabs\";\n", "\n", - "\n", - "```" + "" ] }, { @@ -66,7 +56,7 @@ "source": [ "## Basic example (short documents)\n", "\n", - "Let's generate some simple documents for illustrative purposes." + "Let's use the following 3 documents for illustrative purposes." ] }, { @@ -206,7 +196,7 @@ "metadata": {}, "outputs": [], "source": [ - "pip install -qU langgraph" + "% pip install -qU langgraph" ] }, { diff --git a/docs/docs/versions/migrating_chains/map_rerank_docs_chain.ipynb b/docs/docs/versions/migrating_chains/map_rerank_docs_chain.ipynb index 43b3408b2ba49..2e0a36f16c9ca 100644 --- a/docs/docs/versions/migrating_chains/map_rerank_docs_chain.ipynb +++ b/docs/docs/versions/migrating_chains/map_rerank_docs_chain.ipynb @@ -5,9 +5,7 @@ "id": "9db5ad7a-857e-46ea-9d0c-ba3fbe62fc81", "metadata": {}, "source": [ - "---\n", - "title: Migrating from MapRerankDocumentsChain\n", - "---\n", + "# Migrating from MapRerankDocumentsChain\n", "\n", "[MapRerankDocumentsChain](https://api.python.langchain.com/en/latest/chains/langchain.chains.combine_documents.map_rerank.MapRerankDocumentsChain.html) implements a strategy for analyzing long texts. The strategy is as follows:\n", "\n", @@ -27,7 +25,7 @@ "source": [ "## Example\n", "\n", - "Let's go through an example where we analyze a set of documents. We first generate some simple documents for illustrative purposes:" + "Let's go through an example where we analyze a set of documents. 
Let's use the following 3 documents:" ] }, { diff --git a/docs/docs/versions/migrating_chains/multi_prompt_chain.ipynb b/docs/docs/versions/migrating_chains/multi_prompt_chain.ipynb index c1613464382f1..3bbfe7e729fc4 100644 --- a/docs/docs/versions/migrating_chains/multi_prompt_chain.ipynb +++ b/docs/docs/versions/migrating_chains/multi_prompt_chain.ipynb @@ -1,20 +1,12 @@ { "cells": [ - { - "cell_type": "markdown", - "id": "575befea-4d98-4941-8e55-1581b169a674", - "metadata": {}, - "source": [ - "---\n", - "title: Migrating from MultiPromptChain\n", - "---" - ] - }, { "cell_type": "markdown", "id": "14625d35-efca-41cf-b203-be9f4c375700", "metadata": {}, "source": [ + "# Migrating from MultiPromptChain\n", + "\n", "The [`MultiPromptChain`](https://api.python.langchain.com/en/latest/chains/langchain.chains.router.multi_prompt.MultiPromptChain.html) routed an input query to one of multiple LLMChains-- that is, given an input query, it used a LLM to select from a list of prompts, formatted the query into the prompt, and generated a response.\n", "\n", "`MultiPromptChain` does not support common [chat model](/docs/concepts/#chat-models) features, such as message roles and [tool calling](/docs/concepts/#functiontool-calling).\n", @@ -321,7 +313,7 @@ "\n", "## Overview:\n", "\n", - "- Under the hood, `MultiPromptChain` routes the query by instructing the LLM to generate JSON-formatted text, and parses out the intended destination. It takes a registry of string prompt templates as input.\n", + "- Under the hood, `MultiPromptChain` routed the query by instructing the LLM to generate JSON-formatted text, and parses out the intended destination. It took a registry of string prompt templates as input.\n", "- The LangGraph implementation, implemented above via lower-level primitives, uses tool-calling to route to arbitrary chains. In this example, the chains include chat model templates and chat models." ] }, diff --git a/docs/docs/versions/migrating_chains/refine_docs_chain.ipynb b/docs/docs/versions/migrating_chains/refine_docs_chain.ipynb index 51fd1bbcc438b..3a6ee15654962 100644 --- a/docs/docs/versions/migrating_chains/refine_docs_chain.ipynb +++ b/docs/docs/versions/migrating_chains/refine_docs_chain.ipynb @@ -5,9 +5,7 @@ "id": "32eee276-7847-45d8-b303-dccc330c8a1a", "metadata": {}, "source": [ - "---\n", - "title: Migrating from RefineDocumentsChain\n", - "---\n", + "# Migrating from RefineDocumentsChain\n", "\n", "[RefineDocumentsChain](https://api.python.langchain.com/en/latest/chains/langchain.chains.combine_documents.refine.RefineDocumentsChain.html) implements a strategy for analyzing long texts. 
The strategy is as follows:\n", "\n", @@ -28,11 +26,9 @@ "\n", "Let's first load a chat model:\n", "\n", - "```{=mdx}\n", "import ChatModelTabs from \"@theme/ChatModelTabs\";\n", "\n", - "\n", - "```" + "" ] }, { diff --git a/docs/docs/versions/migrating_chains/retrieval_qa.ipynb b/docs/docs/versions/migrating_chains/retrieval_qa.ipynb index 2f1c45daa9027..6a0b84139c09c 100644 --- a/docs/docs/versions/migrating_chains/retrieval_qa.ipynb +++ b/docs/docs/versions/migrating_chains/retrieval_qa.ipynb @@ -1,21 +1,13 @@ { "cells": [ - { - "cell_type": "markdown", - "id": "eddcd5c1-cbe9-4a7d-8903-7d1ab29f9094", - "metadata": {}, - "source": [ - "---\n", - "title: Migrating from RetrievalQA\n", - "---" - ] - }, { "cell_type": "markdown", "id": "b2d37868-dd01-4814-a76a-256f36cf66f7", "metadata": {}, "source": [ - "The [`RetrievalQA`](https://api.python.langchain.com/en/latest/chains/langchain.chains.retrieval_qa.base.RetrievalQA.html) chain performed natural-language question answering over a data source using retrieval-augmented generation.\n", + "# Migrating from RetrievalQA\n", + "\n", + "The [`RetrievalQA` chain](https://api.python.langchain.com/en/latest/chains/langchain.chains.retrieval_qa.base.RetrievalQA.html) performed natural-language question answering over a data source using retrieval-augmented generation.\n", "\n", "Some advantages of switching to the LCEL implementation are:\n", "\n", @@ -23,7 +15,13 @@ "- More easily return source documents.\n", "- Support for runnable methods like streaming and async operations.\n", "\n", - "Now let's look at them side-by-side. We'll use the same ingestion code to load a [blog post by Lilian Weng](https://lilianweng.github.io/posts/2023-06-23-agent/) on autonomous agents into a local vector store:" + "Now let's look at them side-by-side. We'll use the following ingestion code to load a [blog post by Lilian Weng](https://lilianweng.github.io/posts/2023-06-23-agent/) on autonomous agents into a local vector store:\n", + "\n", + "## Shared setup\n", + "\n", + "For both versions, we'll need to load the data with the `WebBaseLoader` document loader, split it with `RecursiveCharacterTextSplitter`, and add it to an in-memory `FAISS` vector store.\n", + "\n", + "We will also instantiate a chat model to use." ] }, { @@ -227,7 +225,7 @@ "\n", "## Next steps\n", "\n", - "Check out the [LCEL conceptual docs](/docs/concepts/#langchain-expression-language-lcel) for more background information." + "Check out the [LCEL conceptual docs](/docs/concepts/#langchain-expression-language-lcel) for more background information on the LangChain expression language." ] } ], diff --git a/docs/docs/versions/migrating_chains/stuff_docs_chain.ipynb b/docs/docs/versions/migrating_chains/stuff_docs_chain.ipynb index c7e255b884533..9596a1ddbcc25 100644 --- a/docs/docs/versions/migrating_chains/stuff_docs_chain.ipynb +++ b/docs/docs/versions/migrating_chains/stuff_docs_chain.ipynb @@ -5,9 +5,7 @@ "id": "ed78c53c-55ad-4ea2-9cc2-a39a1963c098", "metadata": {}, "source": [ - "---\n", - "title: Migrating from StuffDocumentsChain\n", - "---\n", + "# Migrating from StuffDocumentsChain\n", "\n", "[StuffDocumentsChain](https://api.python.langchain.com/en/latest/chains/langchain.chains.combine_documents.stuff.StuffDocumentsChain.html) combines documents by concatenating them into a single context window. 
It is a straightforward and effective strategy for combining documents for question-answering, summarization, and other purposes.\n", "\n", @@ -17,11 +15,9 @@ "\n", "Let's first load a chat model:\n", "\n", - "```{=mdx}\n", "import ChatModelTabs from \"@theme/ChatModelTabs\";\n", "\n", - "\n", - "```" + "" ] }, { From 0091947efdcf99ba32165cd5784084a4ef278c9c Mon Sep 17 00:00:00 2001 From: Noah Mayerhofer Date: Thu, 22 Aug 2024 15:07:36 +0200 Subject: [PATCH 55/80] community: add retry for session expired exception in neo4j (#25660) Description: The neo4j driver can raise a SessionExpired error, which is considered a retriable error. If a query fails with a SessionExpired error, this change retries every query once. This change will make the neo4j integration less flaky. Twitter handle: noahmay_ --- .../vectorstores/neo4j_vector.py | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/libs/community/langchain_community/vectorstores/neo4j_vector.py b/libs/community/langchain_community/vectorstores/neo4j_vector.py index c78c411611290..a6aa4390fd687 100644 --- a/libs/community/langchain_community/vectorstores/neo4j_vector.py +++ b/libs/community/langchain_community/vectorstores/neo4j_vector.py @@ -587,7 +587,11 @@ def __init__( pass def query( - self, query: str, *, params: Optional[dict] = None + self, + query: str, + *, + params: Optional[dict] = None, + retry_on_session_expired: bool = True, ) -> List[Dict[str, Any]]: """ This method sends a Cypher query to the connected Neo4j database @@ -600,7 +604,7 @@ def query( Returns: List[Dict[str, Any]]: List of dictionaries containing the query results. """ - from neo4j.exceptions import CypherSyntaxError + from neo4j.exceptions import CypherSyntaxError, SessionExpired params = params or {} with self._driver.session(database=self._database) as session: @@ -609,6 +613,15 @@ def query( return [r.data() for r in data] except CypherSyntaxError as e: raise ValueError(f"Cypher Statement is not valid\n{e}") + except ( + SessionExpired + ) as e: # Session expired is a transient error that can be retried + if retry_on_session_expired: + return self.query( + query, params=params, retry_on_session_expired=False + ) + else: + raise e def verify_version(self) -> None: """ From 6247259438eaaaed33f5c0d926ed1d69799a983d Mon Sep 17 00:00:00 2001 From: Swastik-Swarup-Dash <143310346+Swastik-Swarup-Dash@users.noreply.github.com> Date: Thu, 22 Aug 2024 18:48:32 +0530 Subject: [PATCH 56/80] update_readme (#25665) Updated LangChain Expression Language (LCEL). for Easier Understanding --- README.md | 20 +++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/README.md b/README.md index 7909cd9f3eb6b..963a3ba17feb3 100644 --- a/README.md +++ b/README.md @@ -14,18 +14,20 @@ Looking for the JS/TS library? Check out [LangChain.js](https://github.com/langchain-ai/langchainjs). -To help you ship LangChain apps to production faster, check out [LangSmith](https://smith.langchain.com). -[LangSmith](https://smith.langchain.com) is a unified developer platform for building, testing, and monitoring LLM applications. +To help you ship LangChain apps to production faster, check out [LangSmith](https://smith.langchain.com). +[LangSmith](https://smith.langchain.com) is a unified developer platform for building, testing, and monitoring LLM applications. Fill out [this form](https://www.langchain.com/contact-sales) to speak with our sales team. 
## Quick Install With pip: + ```bash pip install langchain ``` With conda: + ```bash conda install langchain -c conda-forge ``` @@ -36,12 +38,13 @@ conda install langchain -c conda-forge For these applications, LangChain simplifies the entire application lifecycle: -- **Open-source libraries**: Build your applications using LangChain's open-source [building blocks](https://python.langchain.com/v0.2/docs/concepts#langchain-expression-language-lcel), [components](https://python.langchain.com/v0.2/docs/concepts), and [third-party integrations](https://python.langchain.com/v0.2/docs/integrations/platforms/). -Use [LangGraph](/docs/concepts/#langgraph) to build stateful agents with first-class streaming and human-in-the-loop support. +- **Open-source libraries**: Build your applications using LangChain's open-source [building blocks](https://python.langchain.com/v0.2/docs/concepts#langchain-expression-language-lcel), [components](https://python.langchain.com/v0.2/docs/concepts), and [third-party integrations](https://python.langchain.com/v0.2/docs/integrations/platforms/). + Use [LangGraph](/docs/concepts/#langgraph) to build stateful agents with first-class streaming and human-in-the-loop support. - **Productionization**: Inspect, monitor, and evaluate your apps with [LangSmith](https://docs.smith.langchain.com/) so that you can constantly optimize and deploy with confidence. - **Deployment**: Turn your LangGraph applications into production-ready APIs and Assistants with [LangGraph Cloud](https://langchain-ai.github.io/langgraph/cloud/). ### Open-source libraries + - **`langchain-core`**: Base abstractions and LangChain Expression Language. - **`langchain-community`**: Third party integrations. - Some integrations have been further split into **partner packages** that only rely on **`langchain-core`**. Examples include **`langchain_openai`** and **`langchain_anthropic`**. @@ -49,9 +52,11 @@ Use [LangGraph](/docs/concepts/#langgraph) to build stateful agents with first-c - **[`LangGraph`](https://langchain-ai.github.io/langgraph/)**: A library for building robust and stateful multi-actor applications with LLMs by modeling steps as edges and nodes in a graph. Integrates smoothly with LangChain, but can be used without it. ### Productionization: + - **[LangSmith](https://docs.smith.langchain.com/)**: A developer platform that lets you debug, test, evaluate, and monitor chains built on any LLM framework and seamlessly integrates with LangChain. ### Deployment: + - **[LangGraph Cloud](https://langchain-ai.github.io/langgraph/cloud/)**: Turn your LangGraph applications into production-ready APIs and Assistants. ![Diagram outlining the hierarchical organization of the LangChain framework, displaying the interconnected parts across multiple layers.](docs/static/svg/langchain_stack_062024.svg "LangChain Architecture Overview") @@ -76,15 +81,17 @@ Use [LangGraph](/docs/concepts/#langgraph) to build stateful agents with first-c And much more! Head to the [Tutorials](https://python.langchain.com/v0.2/docs/tutorials/) section of the docs for more. ## 🚀 How does LangChain help? + The main value props of the LangChain libraries are: + 1. **Components**: composable building blocks, tools and integrations for working with language models. Components are modular and easy-to-use, whether you are using the rest of the LangChain framework or not 2. **Off-the-shelf chains**: built-in assemblages of components for accomplishing higher-level tasks -Off-the-shelf chains make it easy to get started. 
Components make it easy to customize existing chains and build new ones. +Off-the-shelf chains make it easy to get started. Components make it easy to customize existing chains and build new ones. ## LangChain Expression Language (LCEL) -LCEL is the foundation of many of LangChain's components, and is a declarative way to compose chains. LCEL was designed from day 1 to support putting prototypes in production, with no code changes, from the simplest “prompt + LLM” chain to the most complex chains. +LCEL is a key part of LangChain, allowing you to build and organize chains of processes in a straightforward, declarative manner. It was designed to support taking prototypes directly into production without needing to alter any code. This means you can use LCEL to set up everything from basic "prompt + LLM" setups to intricate, multi-step workflows. - **[Overview](https://python.langchain.com/v0.2/docs/concepts/#langchain-expression-language-lcel)**: LCEL and its benefits - **[Interface](https://python.langchain.com/v0.2/docs/concepts/#runnable-interface)**: The standard Runnable interface for LCEL objects @@ -123,7 +130,6 @@ Please see [here](https://python.langchain.com) for full documentation, which in - [🦜🕸️ LangGraph](https://langchain-ai.github.io/langgraph/): Create stateful, multi-actor applications with LLMs. Integrates smoothly with LangChain, but can be used without it. - [🦜🏓 LangServe](https://python.langchain.com/docs/langserve): Deploy LangChain runnables and chains as REST APIs. - ## 💁 Contributing As an open-source project in a rapidly developing field, we are extremely open to contributions, whether it be in the form of a new feature, improved infrastructure, or better documentation. From 67b6e6c2e3a29496aef06b106d4101f89a0fb65d Mon Sep 17 00:00:00 2001 From: ccurme Date: Thu, 22 Aug 2024 09:22:03 -0400 Subject: [PATCH 57/80] docs[patch]: update AWS integration docs (#25631) De-beta ChatBedrockConverse. --- docs/docs/integrations/chat/bedrock.ipynb | 105 ++++++++++++++++++---- docs/docs/integrations/platforms/aws.mdx | 9 ++ 2 files changed, 99 insertions(+), 15 deletions(-) diff --git a/docs/docs/integrations/chat/bedrock.ipynb b/docs/docs/integrations/chat/bedrock.ipynb index f20196029eb6d..28083f0434e7f 100644 --- a/docs/docs/integrations/chat/bedrock.ipynb +++ b/docs/docs/integrations/chat/bedrock.ipynb @@ -95,7 +95,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 1, "id": "cb09c344-1836-4e0c-acf8-11d13ac1dbae", "metadata": {}, "outputs": [], @@ -223,34 +223,28 @@ "id": "d1ee55bc-ffc8-4cfa-801c-993953a08cfd", "metadata": {}, "source": [ - "## ***Beta***: Bedrock Converse API\n", + "## Bedrock Converse API\n", "\n", - "AWS has recently recently the Bedrock Converse API which provides a unified conversational interface for Bedrock models. This API does not yet support custom models. You can see a list of all [models that are supported here](https://docs.aws.amazon.com/bedrock/latest/userguide/conversation-inference.html). To improve reliability the ChatBedrock integration will switch to using the Bedrock Converse API as soon as it has feature parity with the existing Bedrock API. 
Until then a separate [ChatBedrockConverse](https://api.python.langchain.com/en/latest/chat_models/langchain_aws.chat_models.bedrock_converse.ChatBedrockConverse.html#langchain_aws.chat_models.bedrock_converse.ChatBedrockConverse) integration has been released in beta for users who do not need to use custom models.\n", + "AWS has recently released the Bedrock Converse API which provides a unified conversational interface for Bedrock models. This API does not yet support custom models. You can see a list of all [models that are supported here](https://docs.aws.amazon.com/bedrock/latest/userguide/conversation-inference.html). To improve reliability the ChatBedrock integration will switch to using the Bedrock Converse API as soon as it has feature parity with the existing Bedrock API. Until then a separate [ChatBedrockConverse](https://python.langchain.com/v0.2/api_reference/aws/chat_models/langchain_aws.chat_models.bedrock_converse.ChatBedrockConverse.html) integration has been released.\n", + "\n", + "We recommend using `ChatBedrockConverse` for users who do not need to use custom models.\n", "\n", "You can use it like so:" ] }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 3, "id": "ae728e59-94d4-40cf-9d24-25ad8723fc59", "metadata": {}, "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/Users/bagatur/langchain/libs/core/langchain_core/_api/beta_decorator.py:87: LangChainBetaWarning: The class `ChatBedrockConverse` is in beta. It is actively being worked on, so the API may change.\n", - " warn_beta(\n" - ] - }, { "data": { "text/plain": [ - "AIMessage(content=\"Voici la traduction en français :\\n\\nJ'aime la programmation.\", response_metadata={'ResponseMetadata': {'RequestId': '122fb1c8-c3c5-4b06-941e-c95d210bfbc7', 'HTTPStatusCode': 200, 'HTTPHeaders': {'date': 'Mon, 01 Jul 2024 21:48:25 GMT', 'content-type': 'application/json', 'content-length': '243', 'connection': 'keep-alive', 'x-amzn-requestid': '122fb1c8-c3c5-4b06-941e-c95d210bfbc7'}, 'RetryAttempts': 0}, 'stopReason': 'end_turn', 'metrics': {'latencyMs': 830}}, id='run-0e3df22f-fcd8-4fbb-a4fb-565227e7e430-0', usage_metadata={'input_tokens': 29, 'output_tokens': 21, 'total_tokens': 50})" + "AIMessage(content=\"Voici la traduction en français :\\n\\nJ'aime la programmation.\", response_metadata={'ResponseMetadata': {'RequestId': '4fcbfbe9-f916-4df2-b0bd-ea1147b550aa', 'HTTPStatusCode': 200, 'HTTPHeaders': {'date': 'Wed, 21 Aug 2024 17:23:49 GMT', 'content-type': 'application/json', 'content-length': '243', 'connection': 'keep-alive', 'x-amzn-requestid': '4fcbfbe9-f916-4df2-b0bd-ea1147b550aa'}, 'RetryAttempts': 0}, 'stopReason': 'end_turn', 'metrics': {'latencyMs': 672}}, id='run-77ee9810-e32b-45dc-9ccb-6692253b1f45-0', usage_metadata={'input_tokens': 29, 'output_tokens': 21, 'total_tokens': 50})" ] }, - "execution_count": 8, + "execution_count": 3, "metadata": {}, "output_type": "execute_result" } @@ -268,6 +262,87 @@ "llm.invoke(messages)" ] }, + { + "cell_type": "markdown", + "id": "4da16f3e-e80b-48c0-8036-c1cc5f7c8c05", + "metadata": {}, + "source": [ + "### Streaming\n", + "\n", + "Note that `ChatBedrockConverse` emits content blocks while streaming:" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "7794b32e-d8de-4973-bf0f-39807dc745f0", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "content=[] id='run-2c92c5af-d771-4cc2-98d9-c11bbd30a1d8'\n", + "content=[{'type': 'text', 'text': 'Vo', 'index': 0}] 
id='run-2c92c5af-d771-4cc2-98d9-c11bbd30a1d8'\n", + "content=[{'type': 'text', 'text': 'ici', 'index': 0}] id='run-2c92c5af-d771-4cc2-98d9-c11bbd30a1d8'\n", + "content=[{'type': 'text', 'text': ' la', 'index': 0}] id='run-2c92c5af-d771-4cc2-98d9-c11bbd30a1d8'\n", + "content=[{'type': 'text', 'text': ' tra', 'index': 0}] id='run-2c92c5af-d771-4cc2-98d9-c11bbd30a1d8'\n", + "content=[{'type': 'text', 'text': 'duction', 'index': 0}] id='run-2c92c5af-d771-4cc2-98d9-c11bbd30a1d8'\n", + "content=[{'type': 'text', 'text': ' en', 'index': 0}] id='run-2c92c5af-d771-4cc2-98d9-c11bbd30a1d8'\n", + "content=[{'type': 'text', 'text': ' français', 'index': 0}] id='run-2c92c5af-d771-4cc2-98d9-c11bbd30a1d8'\n", + "content=[{'type': 'text', 'text': ' :', 'index': 0}] id='run-2c92c5af-d771-4cc2-98d9-c11bbd30a1d8'\n", + "content=[{'type': 'text', 'text': '\\n\\nJ', 'index': 0}] id='run-2c92c5af-d771-4cc2-98d9-c11bbd30a1d8'\n", + "content=[{'type': 'text', 'text': \"'\", 'index': 0}] id='run-2c92c5af-d771-4cc2-98d9-c11bbd30a1d8'\n", + "content=[{'type': 'text', 'text': 'a', 'index': 0}] id='run-2c92c5af-d771-4cc2-98d9-c11bbd30a1d8'\n", + "content=[{'type': 'text', 'text': 'ime', 'index': 0}] id='run-2c92c5af-d771-4cc2-98d9-c11bbd30a1d8'\n", + "content=[{'type': 'text', 'text': ' la', 'index': 0}] id='run-2c92c5af-d771-4cc2-98d9-c11bbd30a1d8'\n", + "content=[{'type': 'text', 'text': ' programm', 'index': 0}] id='run-2c92c5af-d771-4cc2-98d9-c11bbd30a1d8'\n", + "content=[{'type': 'text', 'text': 'ation', 'index': 0}] id='run-2c92c5af-d771-4cc2-98d9-c11bbd30a1d8'\n", + "content=[{'type': 'text', 'text': '.', 'index': 0}] id='run-2c92c5af-d771-4cc2-98d9-c11bbd30a1d8'\n", + "content=[{'index': 0}] id='run-2c92c5af-d771-4cc2-98d9-c11bbd30a1d8'\n", + "content=[] response_metadata={'stopReason': 'end_turn'} id='run-2c92c5af-d771-4cc2-98d9-c11bbd30a1d8'\n", + "content=[] response_metadata={'metrics': {'latencyMs': 713}} id='run-2c92c5af-d771-4cc2-98d9-c11bbd30a1d8' usage_metadata={'input_tokens': 29, 'output_tokens': 21, 'total_tokens': 50}\n" + ] + } + ], + "source": [ + "for chunk in llm.stream(messages):\n", + " print(chunk)" + ] + }, + { + "cell_type": "markdown", + "id": "0ef05abb-9c04-4dc3-995e-f857779644d5", + "metadata": {}, + "source": [ + "An output parser can be used to filter to text, if desired:" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "2a4e743f-ea7d-4e5a-9b12-f9992362de8b", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "|Vo|ici| la| tra|duction| en| français| :|\n", + "\n", + "J|'|a|ime| la| programm|ation|.||||" + ] + } + ], + "source": [ + "from langchain_core.output_parsers import StrOutputParser\n", + "\n", + "chain = llm | StrOutputParser()\n", + "\n", + "for chunk in chain.stream(messages):\n", + " print(chunk, end=\"|\")" + ] + }, { "cell_type": "markdown", "id": "3a5bb5ca-c3ae-4a58-be67-2cd18574b9a3", @@ -297,7 +372,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.9" + "version": "3.10.4" } }, "nbformat": 4, diff --git a/docs/docs/integrations/platforms/aws.mdx b/docs/docs/integrations/platforms/aws.mdx index 601ef5c75d1b7..da22219b0afd1 100755 --- a/docs/docs/integrations/platforms/aws.mdx +++ b/docs/docs/integrations/platforms/aws.mdx @@ -34,6 +34,15 @@ See a [usage example](/docs/integrations/chat/bedrock). 
from langchain_aws import ChatBedrock ``` +### Bedrock Converse +AWS has recently released the Bedrock Converse API which provides a unified conversational interface for Bedrock models. This API does not yet support custom models. You can see a list of all [models that are supported here](https://docs.aws.amazon.com/bedrock/latest/userguide/conversation-inference.html). To improve reliability the ChatBedrock integration will switch to using the Bedrock Converse API as soon as it has feature parity with the existing Bedrock API. Until then a separate [ChatBedrockConverse](https://python.langchain.com/v0.2/api_reference/aws/chat_models/langchain_aws.chat_models.bedrock_converse.ChatBedrockConverse.html) integration has been released. + +We recommend using `ChatBedrockConverse` for users who do not need to use custom models. See the [docs](/docs/integrations/chat/bedrock/#bedrock-converse-api) and [API reference](https://python.langchain.com/v0.2/api_reference/aws/chat_models/langchain_aws.chat_models.bedrock_converse.ChatBedrockConverse.html) for more detail. + +```python +from langchain_aws import ChatBedrockConverse +``` + ## LLMs ### Bedrock From 820da64983e1d7df62baf1d21ed0c5f99cc5e490 Mon Sep 17 00:00:00 2001 From: Krishna Kulkarni Date: Thu, 22 Aug 2024 20:03:45 +0530 Subject: [PATCH 58/80] limit the most recent documents to fetch from MongoDB database. (#25435) limit the most recent documents to fetch from MongoDB database. Thank you for contributing to LangChain! - [ ] **limit the most recent documents to fetch from MongoDB database.**: "langchain_mongodb: limit the most recent documents to fetch from MongoDB database." - [ ] **PR message**: ***Delete this entire checklist*** and replace with - **Description:** Added a doc_limit parameter which enables the limit for the documents to fetch from MongoDB database - **Issue:** - **Dependencies:** None --------- Co-authored-by: Chester Curme --- .../langchain_mongodb/chat_message_histories.py | 14 +++++++++++++- .../unit_tests/test_chat_message_histories.py | 1 + 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/libs/partners/mongodb/langchain_mongodb/chat_message_histories.py b/libs/partners/mongodb/langchain_mongodb/chat_message_histories.py index be594e0921203..9ad45ae4e1ab8 100644 --- a/libs/partners/mongodb/langchain_mongodb/chat_message_histories.py +++ b/libs/partners/mongodb/langchain_mongodb/chat_message_histories.py @@ -68,6 +68,7 @@ def __init__( session_id_key: str = DEFAULT_SESSION_ID_KEY, history_key: str = DEFAULT_HISTORY_KEY, create_index: bool = True, + history_size: Optional[int] = None, index_kwargs: Optional[Dict] = None, ): """Initialize with a MongoDBChatMessageHistory instance. @@ -88,6 +89,8 @@ def __init__( name of the field that stores the chat history. create_index: Optional[bool] whether to create an index on the session id field. + history_size: Optional[int] + count of (most recent) messages to fetch from MongoDB. index_kwargs: Optional[Dict] additional keyword arguments to pass to the index creation. 
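            Example:
                A minimal usage sketch showing how ``history_size`` limits
                retrieval to the most recent messages. The connection string
                and session id below are placeholders and assume a locally
                reachable MongoDB instance:

                .. code-block:: python

                    from langchain_mongodb.chat_message_histories import (
                        MongoDBChatMessageHistory,
                    )

                    chat_history = MongoDBChatMessageHistory(
                        connection_string="mongodb://localhost:27017",
                        session_id="example-session",
                        history_size=10,  # fetch only the 10 most recent messages
                    )
                    chat_history.add_user_message("Hello!")
                    chat_history.add_ai_message("Hi, how can I help?")
                    print(chat_history.messages)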
""" @@ -97,6 +100,7 @@ def __init__( self.collection_name = collection_name self.session_id_key = session_id_key self.history_key = history_key + self.history_size = history_size try: self.client: MongoClient = MongoClient(connection_string) @@ -114,7 +118,15 @@ def __init__( def messages(self) -> List[BaseMessage]: # type: ignore """Retrieve the messages from MongoDB""" try: - cursor = self.collection.find({self.session_id_key: self.session_id}) + if self.history_size is None: + cursor = self.collection.find({self.session_id_key: self.session_id}) + else: + skip_count = max( + 0, self.collection.count_documents({}) - self.history_size + ) + cursor = self.collection.find( + {self.session_id_key: self.session_id}, skip=skip_count + ) except errors.OperationFailure as error: logger.error(error) diff --git a/libs/partners/mongodb/tests/unit_tests/test_chat_message_histories.py b/libs/partners/mongodb/tests/unit_tests/test_chat_message_histories.py index 4f3fa4af2b343..2031602c2b281 100644 --- a/libs/partners/mongodb/tests/unit_tests/test_chat_message_histories.py +++ b/libs/partners/mongodb/tests/unit_tests/test_chat_message_histories.py @@ -16,6 +16,7 @@ def __init__(self) -> None: self.collection = MockCollection() self.session_id_key = "SessionId" self.history_key = "History" + self.history_size = None def test_memory_with_message_store() -> None: From 5e3a321f713eeda43cf0637c7393467643c36f31 Mon Sep 17 00:00:00 2001 From: maang-h <55082429+maang-h@users.noreply.github.com> Date: Thu, 22 Aug 2024 22:34:41 +0800 Subject: [PATCH 59/80] docs: Add ChatZhipuAI tool calling and structured output docstring (#25669) - **Description:** Add `ChatZhipuAI` tool calling and structured output docstring. --- .../chat_models/zhipuai.py | 68 ++++++++++++++++++- 1 file changed, 67 insertions(+), 1 deletion(-) diff --git a/libs/community/langchain_community/chat_models/zhipuai.py b/libs/community/langchain_community/chat_models/zhipuai.py index 349a8b59ca4d8..8093e3cc52157 100644 --- a/libs/community/langchain_community/chat_models/zhipuai.py +++ b/libs/community/langchain_community/chat_models/zhipuai.py @@ -117,7 +117,12 @@ def _get_jwt_token(api_key: str) -> str: Returns: The JWT token. """ - import jwt + try: + import jwt + except ImportError: + raise ImportError( + "jwt package not found, please install it with" "`pip install pyjwt`" + ) try: id, secret = api_key.split(".") @@ -323,6 +328,67 @@ class ChatZhipuAI(BaseChatModel): [AIMessage(content='I enjoy programming.', response_metadata={'token_usage': {'completion_tokens': 6, 'prompt_tokens': 23, 'total_tokens': 29}, 'model_name': 'glm-4', 'finish_reason': 'stop'}, id='run-ba06af9d-4baa-40b2-9298-be9c62aa0849-0')] + Tool calling: + .. code-block:: python + + from langchain_core.pydantic_v1 import BaseModel, Field + + + class GetWeather(BaseModel): + '''Get the current weather in a given location''' + + location: str = Field( + ..., description="The city and state, e.g. San Francisco, CA" + ) + + + class GetPopulation(BaseModel): + '''Get the current population in a given location''' + + location: str = Field( + ..., description="The city and state, e.g. San Francisco, CA" + ) + + chat_with_tools = zhipuai_chat.bind_tools([GetWeather, GetPopulation]) + ai_msg = chat_with_tools.invoke( + "Which city is hotter today and which is bigger: LA or NY?" + ) + ai_msg.tool_calls + + .. 
code-block:: python + + [ + { + 'name': 'GetWeather', + 'args': {'location': 'Los Angeles, CA'}, + 'id': 'call_202408222146464ea49ec8731145a9', + 'type': 'tool_call' + } + ] + + Structured output: + .. code-block:: python + + from typing import Optional + + from langchain_core.pydantic_v1 import BaseModel, Field + + + class Joke(BaseModel): + '''Joke to tell user.''' + + setup: str = Field(description="The setup of the joke") + punchline: str = Field(description="The punchline to the joke") + rating: Optional[int] = Field(description="How funny the joke is, from 1 to 10") + + + structured_chat = zhipuai_chat.with_structured_output(Joke) + structured_chat.invoke("Tell me a joke about cats") + + .. code-block:: python + + Joke(setup='What do cats like to eat for breakfast?', punchline='Mice Krispies!', rating=None) + Response metadata .. code-block:: python From 1f1679e9600d2f4aa750fafe90978c513dab1808 Mon Sep 17 00:00:00 2001 From: Rajendra Kadam Date: Thu, 22 Aug 2024 21:16:52 +0530 Subject: [PATCH 60/80] community: Refactor PebbloSafeLoader (#25582) **Refactor PebbloSafeLoader** - Created `APIWrapper` and moved API logic into it. - Moved helper functions to the utility file. - Created smaller functions and methods for better readability. - Properly read environment variables. - Removed unused code. **Issue:** NA **Dependencies:** NA **tests**: Updated --- .../document_loaders/pebblo.py | 339 +-------------- .../langchain_community/utilities/pebblo.py | 407 +++++++++++++++++- .../document_loaders/test_pebblo.py | 3 +- 3 files changed, 422 insertions(+), 327 deletions(-) diff --git a/libs/community/langchain_community/document_loaders/pebblo.py b/libs/community/langchain_community/document_loaders/pebblo.py index b3bd447516129..772a206a803d8 100644 --- a/libs/community/langchain_community/document_loaders/pebblo.py +++ b/libs/community/langchain_community/document_loaders/pebblo.py @@ -1,31 +1,25 @@ """Pebblo's safe dataloader is a wrapper for document loaders""" -import json import logging import os import uuid -from http import HTTPStatus -from typing import Any, Dict, Iterator, List, Optional +from typing import Dict, Iterator, List, Optional -import requests # type: ignore from langchain_core.documents import Document from langchain_community.document_loaders.base import BaseLoader from langchain_community.utilities.pebblo import ( - APP_DISCOVER_URL, BATCH_SIZE_BYTES, - CLASSIFIER_URL, - LOADER_DOC_URL, - PEBBLO_CLOUD_URL, PLUGIN_VERSION, App, - Doc, IndexedDocument, + PebbloLoaderAPIWrapper, generate_size_based_batches, get_full_path, get_loader_full_path, get_loader_type, get_runtime, + get_source_size, ) logger = logging.getLogger(__name__) @@ -37,7 +31,6 @@ class PebbloSafeLoader(BaseLoader): """ _discover_sent: bool = False - _loader_sent: bool = False def __init__( self, @@ -54,22 +47,17 @@ def __init__( if not name or not isinstance(name, str): raise NameError("Must specify a valid name.") self.app_name = name - self.api_key = os.environ.get("PEBBLO_API_KEY") or api_key self.load_id = str(uuid.uuid4()) self.loader = langchain_loader self.load_semantic = os.environ.get("PEBBLO_LOAD_SEMANTIC") or load_semantic self.owner = owner self.description = description self.source_path = get_loader_full_path(self.loader) - self.source_owner = PebbloSafeLoader.get_file_owner_from_path(self.source_path) self.docs: List[Document] = [] self.docs_with_id: List[IndexedDocument] = [] loader_name = str(type(self.loader)).split(".")[-1].split("'")[0] self.source_type = get_loader_type(loader_name) - 
self.source_path_size = self.get_source_size(self.source_path) - self.source_aggregate_size = 0 - self.classifier_url = classifier_url or CLASSIFIER_URL - self.classifier_location = classifier_location + self.source_path_size = get_source_size(self.source_path) self.batch_size = BATCH_SIZE_BYTES self.loader_details = { "loader": loader_name, @@ -83,7 +71,13 @@ def __init__( } # generate app self.app = self._get_app_details() - self._send_discover() + # initialize Pebblo Loader API client + self.pb_client = PebbloLoaderAPIWrapper( + api_key=api_key, + classifier_location=classifier_location, + classifier_url=classifier_url, + ) + self.pb_client.send_loader_discover(self.app) def load(self) -> List[Document]: """Load Documents. @@ -113,7 +107,12 @@ def classify_in_batches(self) -> None: is_last_batch: bool = i == total_batches - 1 self.docs = batch self.docs_with_id = self._index_docs() - classified_docs = self._classify_doc(loading_end=is_last_batch) + classified_docs = self.pb_client.classify_documents( + self.docs_with_id, + self.app, + self.loader_details, + loading_end=is_last_batch, + ) self._add_pebblo_specific_metadata(classified_docs) if self.load_semantic: batch_processed_docs = self._add_semantic_to_docs(classified_docs) @@ -147,7 +146,9 @@ def lazy_load(self) -> Iterator[Document]: break self.docs = list((doc,)) self.docs_with_id = self._index_docs() - classified_doc = self._classify_doc() + classified_doc = self.pb_client.classify_documents( + self.docs_with_id, self.app, self.loader_details + ) self._add_pebblo_specific_metadata(classified_doc) if self.load_semantic: self.docs = self._add_semantic_to_docs(classified_doc) @@ -159,263 +160,6 @@ def lazy_load(self) -> Iterator[Document]: def set_discover_sent(cls) -> None: cls._discover_sent = True - @classmethod - def set_loader_sent(cls) -> None: - cls._loader_sent = True - - def _classify_doc(self, loading_end: bool = False) -> dict: - """Send documents fetched from loader to pebblo-server. Then send - classified documents to Daxa cloud(If api_key is present). Internal method. - - Args: - - loading_end (bool, optional): Flag indicating the halt of data - loading by loader. Defaults to False. 
- """ - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - if loading_end is True: - PebbloSafeLoader.set_loader_sent() - doc_content = [doc.dict() for doc in self.docs_with_id] - docs = [] - for doc in doc_content: - doc_metadata = doc.get("metadata", {}) - doc_authorized_identities = doc_metadata.get("authorized_identities", []) - doc_source_path = get_full_path( - doc_metadata.get( - "full_path", doc_metadata.get("source", self.source_path) - ) - ) - doc_source_owner = doc_metadata.get( - "owner", PebbloSafeLoader.get_file_owner_from_path(doc_source_path) - ) - doc_source_size = doc_metadata.get( - "size", self.get_source_size(doc_source_path) - ) - page_content = str(doc.get("page_content")) - page_content_size = self.calculate_content_size(page_content) - self.source_aggregate_size += page_content_size - doc_id = doc.get("pb_id", None) or 0 - docs.append( - { - "doc": page_content, - "source_path": doc_source_path, - "pb_id": doc_id, - "last_modified": doc.get("metadata", {}).get("last_modified"), - "file_owner": doc_source_owner, - **( - {"authorized_identities": doc_authorized_identities} - if doc_authorized_identities - else {} - ), - **( - {"source_path_size": doc_source_size} - if doc_source_size is not None - else {} - ), - } - ) - payload: Dict[str, Any] = { - "name": self.app_name, - "owner": self.owner, - "docs": docs, - "plugin_version": PLUGIN_VERSION, - "load_id": self.load_id, - "loader_details": self.loader_details, - "loading_end": "false", - "source_owner": self.source_owner, - "classifier_location": self.classifier_location, - } - if loading_end is True: - payload["loading_end"] = "true" - if "loader_details" in payload: - payload["loader_details"]["source_aggregate_size"] = ( - self.source_aggregate_size - ) - payload = Doc(**payload).dict(exclude_unset=True) - classified_docs = {} - # Raw payload to be sent to classifier - if self.classifier_location == "local": - load_doc_url = f"{self.classifier_url}{LOADER_DOC_URL}" - try: - pebblo_resp = requests.post( - load_doc_url, headers=headers, json=payload, timeout=300 - ) - - # Updating the structure of pebblo response docs for efficient searching - for classified_doc in json.loads(pebblo_resp.text).get("docs", []): - classified_docs.update({classified_doc["pb_id"]: classified_doc}) - if pebblo_resp.status_code not in [ - HTTPStatus.OK, - HTTPStatus.BAD_GATEWAY, - ]: - logger.warning( - "Received unexpected HTTP response code: %s", - pebblo_resp.status_code, - ) - logger.debug( - "send_loader_doc[local]: request url %s, body %s len %s\ - response status %s body %s", - pebblo_resp.request.url, - str(pebblo_resp.request.body), - str( - len( - pebblo_resp.request.body if pebblo_resp.request.body else [] - ) - ), - str(pebblo_resp.status_code), - pebblo_resp.json(), - ) - except requests.exceptions.RequestException: - logger.warning("Unable to reach pebblo server.") - except Exception as e: - logger.warning("An Exception caught in _send_loader_doc: local %s", e) - - if self.api_key: - if self.classifier_location == "local": - docs = payload["docs"] - for doc_data in docs: - classified_data = classified_docs.get(doc_data["pb_id"], {}) - doc_data.update( - { - "pb_checksum": classified_data.get("pb_checksum", None), - "loader_source_path": classified_data.get( - "loader_source_path", None - ), - "entities": classified_data.get("entities", {}), - "topics": classified_data.get("topics", {}), - } - ) - doc_data.pop("doc") - - headers.update({"x-api-key": self.api_key}) - pebblo_cloud_url = 
f"{PEBBLO_CLOUD_URL}{LOADER_DOC_URL}" - try: - pebblo_cloud_response = requests.post( - pebblo_cloud_url, headers=headers, json=payload, timeout=20 - ) - logger.debug( - "send_loader_doc[cloud]: request url %s, body %s len %s\ - response status %s body %s", - pebblo_cloud_response.request.url, - str(pebblo_cloud_response.request.body), - str( - len( - pebblo_cloud_response.request.body - if pebblo_cloud_response.request.body - else [] - ) - ), - str(pebblo_cloud_response.status_code), - pebblo_cloud_response.json(), - ) - except requests.exceptions.RequestException: - logger.warning("Unable to reach Pebblo cloud server.") - except Exception as e: - logger.warning("An Exception caught in _send_loader_doc: cloud %s", e) - elif self.classifier_location == "pebblo-cloud": - logger.warning("API key is missing for sending docs to Pebblo cloud.") - raise NameError("API key is missing for sending docs to Pebblo cloud.") - - return classified_docs - - @staticmethod - def calculate_content_size(page_content: str) -> int: - """Calculate the content size in bytes: - - Encode the string to bytes using a specific encoding (e.g., UTF-8) - - Get the length of the encoded bytes. - - Args: - page_content (str): Data string. - - Returns: - int: Size of string in bytes. - """ - - # Encode the content to bytes using UTF-8 - encoded_content = page_content.encode("utf-8") - size = len(encoded_content) - return size - - def _send_discover(self) -> None: - """Send app discovery payload to pebblo-server. Internal method.""" - pebblo_resp = None - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - payload = self.app.dict(exclude_unset=True) - # Raw discover payload to be sent to classifier - if self.classifier_location == "local": - app_discover_url = f"{self.classifier_url}{APP_DISCOVER_URL}" - try: - pebblo_resp = requests.post( - app_discover_url, headers=headers, json=payload, timeout=20 - ) - logger.debug( - "send_discover[local]: request url %s, body %s len %s\ - response status %s body %s", - pebblo_resp.request.url, - str(pebblo_resp.request.body), - str( - len( - pebblo_resp.request.body if pebblo_resp.request.body else [] - ) - ), - str(pebblo_resp.status_code), - pebblo_resp.json(), - ) - if pebblo_resp.status_code in [HTTPStatus.OK, HTTPStatus.BAD_GATEWAY]: - PebbloSafeLoader.set_discover_sent() - else: - logger.warning( - f"Received unexpected HTTP response code:\ - {pebblo_resp.status_code}" - ) - except requests.exceptions.RequestException: - logger.warning("Unable to reach pebblo server.") - except Exception as e: - logger.warning("An Exception caught in _send_discover: local %s", e) - - if self.api_key: - try: - headers.update({"x-api-key": self.api_key}) - # If the pebblo_resp is None, - # then the pebblo server version is not available - if pebblo_resp: - pebblo_server_version = json.loads(pebblo_resp.text).get( - "pebblo_server_version" - ) - payload.update({"pebblo_server_version": pebblo_server_version}) - - payload.update({"pebblo_client_version": PLUGIN_VERSION}) - pebblo_cloud_url = f"{PEBBLO_CLOUD_URL}{APP_DISCOVER_URL}" - pebblo_cloud_response = requests.post( - pebblo_cloud_url, headers=headers, json=payload, timeout=20 - ) - - logger.debug( - "send_discover[cloud]: request url %s, body %s len %s\ - response status %s body %s", - pebblo_cloud_response.request.url, - str(pebblo_cloud_response.request.body), - str( - len( - pebblo_cloud_response.request.body - if pebblo_cloud_response.request.body - else [] - ) - ), - 
str(pebblo_cloud_response.status_code), - pebblo_cloud_response.json(), - ) - except requests.exceptions.RequestException: - logger.warning("Unable to reach Pebblo cloud server.") - except Exception as e: - logger.warning("An Exception caught in _send_discover: cloud %s", e) - def _get_app_details(self) -> App: """Fetch app details. Internal method. @@ -434,49 +178,6 @@ def _get_app_details(self) -> App: ) return app - @staticmethod - def get_file_owner_from_path(file_path: str) -> str: - """Fetch owner of local file path. - - Args: - file_path (str): Local file path. - - Returns: - str: Name of owner. - """ - try: - import pwd - - file_owner_uid = os.stat(file_path).st_uid - file_owner_name = pwd.getpwuid(file_owner_uid).pw_name - except Exception: - file_owner_name = "unknown" - return file_owner_name - - def get_source_size(self, source_path: str) -> int: - """Fetch size of source path. Source can be a directory or a file. - - Args: - source_path (str): Local path of data source. - - Returns: - int: Source size in bytes. - """ - if not source_path: - return 0 - size = 0 - if os.path.isfile(source_path): - size = os.path.getsize(source_path) - elif os.path.isdir(source_path): - total_size = 0 - for dirpath, _, filenames in os.walk(source_path): - for f in filenames: - fp = os.path.join(dirpath, f) - if not os.path.islink(fp): - total_size += os.path.getsize(fp) - size = total_size - return size - def _index_docs(self) -> List[IndexedDocument]: """ Indexes the documents and returns a list of IndexedDocument objects. diff --git a/libs/community/langchain_community/utilities/pebblo.py b/libs/community/langchain_community/utilities/pebblo.py index c61ce5bc000a0..50e5b408b99cb 100644 --- a/libs/community/langchain_community/utilities/pebblo.py +++ b/libs/community/langchain_community/utilities/pebblo.py @@ -1,25 +1,29 @@ from __future__ import annotations +import json import logging import os import pathlib import platform -from typing import List, Optional, Tuple +from enum import Enum +from http import HTTPStatus +from typing import Any, Dict, List, Optional, Tuple from langchain_core.documents import Document from langchain_core.env import get_runtime_environment from langchain_core.pydantic_v1 import BaseModel +from langchain_core.utils import get_from_dict_or_env +from requests import Response, request +from requests.exceptions import RequestException from langchain_community.document_loaders.base import BaseLoader logger = logging.getLogger(__name__) PLUGIN_VERSION = "0.1.1" -CLASSIFIER_URL = os.getenv("PEBBLO_CLASSIFIER_URL", "http://localhost:8000") -PEBBLO_CLOUD_URL = os.getenv("PEBBLO_CLOUD_URL", "https://api.daxa.ai") -LOADER_DOC_URL = "/v1/loader/doc" -APP_DISCOVER_URL = "/v1/app/discover" +_DEFAULT_CLASSIFIER_URL = "http://localhost:8000" +_DEFAULT_PEBBLO_CLOUD_URL = "https://api.daxa.ai" BATCH_SIZE_BYTES = 100 * 1024 # 100 KB # Supported loaders for Pebblo safe data loading @@ -59,9 +63,15 @@ "cloud-folder": cloud_folder, } -SUPPORTED_LOADERS = (*file_loader, *dir_loader, *in_memory) -logger = logging.getLogger(__name__) +class Routes(str, Enum): + """Routes available for the Pebblo API as enumerator.""" + + loader_doc = "/v1/loader/doc" + loader_app_discover = "/v1/app/discover" + retrieval_app_discover = "/v1/app/discover" + prompt = "/v1/prompt" + prompt_governance = "/v1/prompt/governance" class IndexedDocument(Document): @@ -342,3 +352,386 @@ def generate_size_based_batches( batches.append(current_batch) return batches + + +def get_file_owner_from_path(file_path: str) -> 
str: + """Fetch owner of local file path. + + Args: + file_path (str): Local file path. + + Returns: + str: Name of owner. + """ + try: + import pwd + + file_owner_uid = os.stat(file_path).st_uid + file_owner_name = pwd.getpwuid(file_owner_uid).pw_name + except Exception: + file_owner_name = "unknown" + return file_owner_name + + +def get_source_size(source_path: str) -> int: + """Fetch size of source path. Source can be a directory or a file. + + Args: + source_path (str): Local path of data source. + + Returns: + int: Source size in bytes. + """ + if not source_path: + return 0 + size = 0 + if os.path.isfile(source_path): + size = os.path.getsize(source_path) + elif os.path.isdir(source_path): + total_size = 0 + for dirpath, _, filenames in os.walk(source_path): + for f in filenames: + fp = os.path.join(dirpath, f) + if not os.path.islink(fp): + total_size += os.path.getsize(fp) + size = total_size + return size + + +def calculate_content_size(data: str) -> int: + """Calculate the content size in bytes: + - Encode the string to bytes using a specific encoding (e.g., UTF-8) + - Get the length of the encoded bytes. + + Args: + data (str): Data string. + + Returns: + int: Size of string in bytes. + """ + encoded_content = data.encode("utf-8") + size = len(encoded_content) + return size + + +class PebbloLoaderAPIWrapper(BaseModel): + """Wrapper for Pebblo Loader API.""" + + api_key: Optional[str] # Use SecretStr + """API key for Pebblo Cloud""" + classifier_location: str = "local" + """Location of the classifier, local or cloud. Defaults to 'local'""" + classifier_url: Optional[str] + """URL of the Pebblo Classifier""" + cloud_url: Optional[str] + """URL of the Pebblo Cloud""" + + def __init__(self, **kwargs: Any): + """Validate that api key in environment.""" + kwargs["api_key"] = get_from_dict_or_env( + kwargs, "api_key", "PEBBLO_API_KEY", "" + ) + kwargs["classifier_url"] = get_from_dict_or_env( + kwargs, "classifier_url", "PEBBLO_CLASSIFIER_URL", _DEFAULT_CLASSIFIER_URL + ) + kwargs["cloud_url"] = get_from_dict_or_env( + kwargs, "cloud_url", "PEBBLO_CLOUD_URL", _DEFAULT_PEBBLO_CLOUD_URL + ) + super().__init__(**kwargs) + + def send_loader_discover(self, app: App) -> None: + """ + Send app discovery request to Pebblo server & cloud. + + Args: + app (App): App instance to be discovered. + """ + pebblo_resp = None + payload = app.dict(exclude_unset=True) + + if self.classifier_location == "local": + # Send app details to local classifier + headers = self._make_headers() + app_discover_url = f"{self.classifier_url}{Routes.loader_app_discover}" + pebblo_resp = self.make_request("POST", app_discover_url, headers, payload) + + if self.api_key: + # Send app details to Pebblo cloud if api_key is present + headers = self._make_headers(cloud_request=True) + if pebblo_resp: + pebblo_server_version = json.loads(pebblo_resp.text).get( + "pebblo_server_version" + ) + payload.update({"pebblo_server_version": pebblo_server_version}) + + payload.update({"pebblo_client_version": PLUGIN_VERSION}) + pebblo_cloud_url = f"{self.cloud_url}{Routes.loader_app_discover}" + _ = self.make_request("POST", pebblo_cloud_url, headers, payload) + + def classify_documents( + self, + docs_with_id: List[IndexedDocument], + app: App, + loader_details: dict, + loading_end: bool = False, + ) -> dict: + """ + Send documents to Pebblo server for classification. + Then send classified documents to Daxa cloud(If api_key is present). + + Args: + docs_with_id (List[IndexedDocument]): List of documents to be classified. 
+ app (App): App instance. + loader_details (dict): Loader details. + loading_end (bool): Boolean, indicating the halt of data loading by loader. + """ + source_path = loader_details.get("source_path", "") + source_owner = get_file_owner_from_path(source_path) + # Prepare docs for classification + docs, source_aggregate_size = self.prepare_docs_for_classification( + docs_with_id, source_path + ) + # Build payload for classification + payload = self.build_classification_payload( + app, docs, loader_details, source_owner, source_aggregate_size, loading_end + ) + + classified_docs = {} + if self.classifier_location == "local": + # Send docs to local classifier + headers = self._make_headers() + load_doc_url = f"{self.classifier_url}{Routes.loader_doc}" + try: + pebblo_resp = self.make_request( + "POST", load_doc_url, headers, payload, 300 + ) + + if pebblo_resp: + # Updating structure of pebblo response docs for efficient searching + for classified_doc in json.loads(pebblo_resp.text).get("docs", []): + classified_docs.update( + {classified_doc["pb_id"]: classified_doc} + ) + except Exception as e: + logger.warning("An Exception caught in classify_documents: local %s", e) + + if self.api_key: + # Send docs to Pebblo cloud if api_key is present + if self.classifier_location == "local": + # If local classifier is used add the classified information + # and remove doc content + self.update_doc_data(payload["docs"], classified_docs) + self.send_docs_to_pebblo_cloud(payload) + elif self.classifier_location == "pebblo-cloud": + logger.warning("API key is missing for sending docs to Pebblo cloud.") + raise NameError("API key is missing for sending docs to Pebblo cloud.") + + return classified_docs + + def send_docs_to_pebblo_cloud(self, payload: dict) -> None: + """ + Send documents to Pebblo cloud. + + Args: + payload (dict): The payload containing documents to be sent. + """ + headers = self._make_headers(cloud_request=True) + pebblo_cloud_url = f"{self.cloud_url}{Routes.loader_doc}" + try: + _ = self.make_request("POST", pebblo_cloud_url, headers, payload) + except Exception as e: + logger.warning("An Exception caught in classify_documents: cloud %s", e) + + def _make_headers(self, cloud_request: bool = False) -> dict: + """ + Generate headers for the request. + + args: + cloud_request (bool): flag indicating whether the request is for Pebblo + cloud. + returns: + dict: Headers for the request. + + """ + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + if cloud_request: + # Add API key for Pebblo cloud request + if self.api_key: + headers.update({"x-api-key": self.api_key}) + else: + logger.warning("API key is missing for Pebblo cloud request.") + return headers + + def build_classification_payload( + self, + app: App, + docs: List[dict], + loader_details: dict, + source_owner: str, + source_aggregate_size: int, + loading_end: bool, + ) -> dict: + """ + Build the payload for document classification. + + Args: + app (App): App instance. + docs (List[dict]): List of documents to be classified. + loader_details (dict): Loader details. + source_owner (str): Owner of the source. + source_aggregate_size (int): Aggregate size of the source. + loading_end (bool): Boolean indicating the halt of data loading by loader. + + Returns: + dict: Payload for document classification. 
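+
+        Note:
+            An illustrative sketch of the payload shape, mirroring the keys
+            assembled below (values shown are this method's inputs, not
+            verbatim output):
+
+            .. code-block:: python
+
+                {
+                    "name": app.name,
+                    "owner": app.owner,
+                    "docs": docs,
+                    "plugin_version": PLUGIN_VERSION,
+                    "load_id": app.load_id,
+                    "loader_details": loader_details,
+                    "loading_end": "false",  # "true" on the final batch
+                    "source_owner": source_owner,
+                    "classifier_location": self.classifier_location,
+                }
+
+            ``source_aggregate_size`` is folded into ``loader_details`` once
+            ``loading_end`` is True; the assembled dict is then validated
+            through the ``Doc`` model before being returned.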
+ """ + payload: Dict[str, Any] = { + "name": app.name, + "owner": app.owner, + "docs": docs, + "plugin_version": PLUGIN_VERSION, + "load_id": app.load_id, + "loader_details": loader_details, + "loading_end": "false", + "source_owner": source_owner, + "classifier_location": self.classifier_location, + } + if loading_end is True: + payload["loading_end"] = "true" + if "loader_details" in payload: + payload["loader_details"]["source_aggregate_size"] = ( + source_aggregate_size + ) + payload = Doc(**payload).dict(exclude_unset=True) + return payload + + @staticmethod + def make_request( + method: str, + url: str, + headers: dict, + payload: Optional[dict] = None, + timeout: int = 20, + ) -> Optional[Response]: + """ + Make a request to the Pebblo API + + Args: + method (str): HTTP method (GET, POST, PUT, DELETE, etc.). + url (str): URL for the request. + headers (dict): Headers for the request. + payload (Optional[dict]): Payload for the request (for POST, PUT, etc.). + timeout (int): Timeout for the request in seconds. + + Returns: + Optional[Response]: Response object if the request is successful. + """ + try: + response = request( + method=method, url=url, headers=headers, json=payload, timeout=timeout + ) + logger.debug( + "Request: method %s, url %s, len %s response status %s", + method, + response.request.url, + str(len(response.request.body if response.request.body else [])), + str(response.status_code), + ) + + if response.status_code >= HTTPStatus.INTERNAL_SERVER_ERROR: + logger.warning(f"Pebblo Server: Error {response.status_code}") + elif response.status_code >= HTTPStatus.BAD_REQUEST: + logger.warning(f"Pebblo received an invalid payload: {response.text}") + elif response.status_code != HTTPStatus.OK: + logger.warning( + f"Pebblo returned an unexpected response code: " + f"{response.status_code}" + ) + + return response + except RequestException: + logger.warning("Unable to reach server %s", url) + except Exception as e: + logger.warning("An Exception caught in make_request: %s", e) + return None + + @staticmethod + def prepare_docs_for_classification( + docs_with_id: List[IndexedDocument], source_path: str + ) -> Tuple[List[dict], int]: + """ + Prepare documents for classification. + + Args: + docs_with_id (List[IndexedDocument]): List of documents to be classified. + source_path (str): Source path of the documents. + + Returns: + Tuple[List[dict], int]: Documents and the aggregate size of the source. 
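+
+        Note:
+            Each returned dict carries ``doc`` (the page content),
+            ``source_path``, ``pb_id``, ``last_modified`` and ``file_owner``,
+            plus ``authorized_identities`` and ``source_path_size`` when they
+            can be resolved from the document metadata. The aggregate size is
+            the sum of the UTF-8 encoded sizes of the page contents.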
+ """ + docs = [] + source_aggregate_size = 0 + doc_content = [doc.dict() for doc in docs_with_id] + for doc in doc_content: + doc_metadata = doc.get("metadata", {}) + doc_authorized_identities = doc_metadata.get("authorized_identities", []) + doc_source_path = get_full_path( + doc_metadata.get( + "full_path", + doc_metadata.get("source", source_path), + ) + ) + doc_source_owner = doc_metadata.get( + "owner", get_file_owner_from_path(doc_source_path) + ) + doc_source_size = doc_metadata.get("size", get_source_size(doc_source_path)) + page_content = str(doc.get("page_content")) + page_content_size = calculate_content_size(page_content) + source_aggregate_size += page_content_size + doc_id = doc.get("pb_id", None) or 0 + docs.append( + { + "doc": page_content, + "source_path": doc_source_path, + "pb_id": doc_id, + "last_modified": doc.get("metadata", {}).get("last_modified"), + "file_owner": doc_source_owner, + **( + {"authorized_identities": doc_authorized_identities} + if doc_authorized_identities + else {} + ), + **( + {"source_path_size": doc_source_size} + if doc_source_size is not None + else {} + ), + } + ) + return docs, source_aggregate_size + + @staticmethod + def update_doc_data(docs: List[dict], classified_docs: dict) -> None: + """ + Update the document data with classified information. + + Args: + docs (List[dict]): List of document data to be updated. + classified_docs (dict): The dictionary containing classified documents. + """ + for doc_data in docs: + classified_data = classified_docs.get(doc_data["pb_id"], {}) + # Update the document data with classified information + doc_data.update( + { + "pb_checksum": classified_data.get("pb_checksum"), + "loader_source_path": classified_data.get("loader_source_path"), + "entities": classified_data.get("entities", {}), + "topics": classified_data.get("topics", {}), + } + ) + # Remove the document content + doc_data.pop("doc") diff --git a/libs/community/tests/unit_tests/document_loaders/test_pebblo.py b/libs/community/tests/unit_tests/document_loaders/test_pebblo.py index 2d6256b5044de..89617b9cd5fa3 100644 --- a/libs/community/tests/unit_tests/document_loaders/test_pebblo.py +++ b/libs/community/tests/unit_tests/document_loaders/test_pebblo.py @@ -144,4 +144,5 @@ def test_pebblo_safe_loader_api_key() -> None: ) # Assert - assert loader.api_key == api_key + assert loader.pb_client.api_key == api_key + assert loader.pb_client.classifier_location == "local" From 4ff2f4499e700748e560641ad14c828535f89b8e Mon Sep 17 00:00:00 2001 From: Rajendra Kadam Date: Thu, 22 Aug 2024 21:21:21 +0530 Subject: [PATCH 61/80] community: Refactor PebbloRetrievalQA (#25583) **Refactor PebbloRetrievalQA** - Created `APIWrapper` and moved API logic into it. - Created smaller functions/methods for better readability. - Properly read environment variables. - Removed unused code. 
- Updated models **Issue:** NA **Dependencies:** NA **tests**: NA --- .../chains/pebblo_retrieval/base.py | 358 +++--------------- .../chains/pebblo_retrieval/models.py | 10 +- .../chains/pebblo_retrieval/utilities.py | 340 ++++++++++++++++- 3 files changed, 391 insertions(+), 317 deletions(-) diff --git a/libs/community/langchain_community/chains/pebblo_retrieval/base.py b/libs/community/langchain_community/chains/pebblo_retrieval/base.py index 61eb638e442a2..93314301b4db9 100644 --- a/libs/community/langchain_community/chains/pebblo_retrieval/base.py +++ b/libs/community/langchain_community/chains/pebblo_retrieval/base.py @@ -5,12 +5,9 @@ import datetime import inspect -import json import logging -from http import HTTPStatus -from typing import Any, Dict, List, Optional, Tuple +from typing import Any, Dict, List, Optional -import requests # type: ignore from langchain.chains.base import Chain from langchain.chains.combine_documents.base import BaseCombineDocumentsChain from langchain_core.callbacks import ( @@ -29,16 +26,14 @@ from langchain_community.chains.pebblo_retrieval.models import ( App, AuthContext, - Qa, + ChainInfo, + Model, SemanticContext, + VectorDB, ) from langchain_community.chains.pebblo_retrieval.utilities import ( - APP_DISCOVER_URL, - CLASSIFIER_URL, - PEBBLO_CLOUD_URL, PLUGIN_VERSION, - PROMPT_GOV_URL, - PROMPT_URL, + PebbloRetrievalAPIWrapper, get_runtime, ) @@ -72,16 +67,18 @@ class PebbloRetrievalQA(Chain): """Description of app.""" api_key: Optional[str] = None #: :meta private: """Pebblo cloud API key for app.""" - classifier_url: str = CLASSIFIER_URL #: :meta private: + classifier_url: Optional[str] = None #: :meta private: """Classifier endpoint.""" classifier_location: str = "local" #: :meta private: """Classifier location. 
It could be either of 'local' or 'pebblo-cloud'.""" _discover_sent: bool = False #: :meta private: """Flag to check if discover payload has been sent.""" - _prompt_sent: bool = False #: :meta private: - """Flag to check if prompt payload has been sent.""" enable_prompt_gov: bool = True #: :meta private: """Flag to check if prompt governance is enabled or not""" + pb_client: PebbloRetrievalAPIWrapper = Field( + default_factory=PebbloRetrievalAPIWrapper + ) + """Pebblo Retrieval API client""" def _call( self, @@ -100,12 +97,11 @@ def _call( answer, docs = res['result'], res['source_documents'] """ prompt_time = datetime.datetime.now().isoformat() - PebbloRetrievalQA.set_prompt_sent(value=False) _run_manager = run_manager or CallbackManagerForChainRun.get_noop_manager() question = inputs[self.input_key] - auth_context = inputs.get(self.auth_context_key, {}) - semantic_context = inputs.get(self.semantic_context_key, {}) - _, prompt_entities = self._check_prompt_validity(question) + auth_context = inputs.get(self.auth_context_key) + semantic_context = inputs.get(self.semantic_context_key) + _, prompt_entities = self.pb_client.check_prompt_validity(question) accepts_run_manager = ( "run_manager" in inspect.signature(self._get_docs).parameters @@ -120,43 +116,17 @@ def _call( input_documents=docs, question=question, callbacks=_run_manager.get_child() ) - qa = { - "name": self.app_name, - "context": [ - { - "retrieved_from": doc.metadata.get( - "full_path", doc.metadata.get("source") - ), - "doc": doc.page_content, - "vector_db": self.retriever.vectorstore.__class__.__name__, - **( - {"pb_checksum": doc.metadata.get("pb_checksum")} - if doc.metadata.get("pb_checksum") - else {} - ), - } - for doc in docs - if isinstance(doc, Document) - ], - "prompt": { - "data": question, - "entities": prompt_entities.get("entities", {}), - "entityCount": prompt_entities.get("entityCount", 0), - "prompt_gov_enabled": self.enable_prompt_gov, - }, - "response": { - "data": answer, - }, - "prompt_time": prompt_time, - "user": auth_context.user_id if auth_context else "unknown", - "user_identities": auth_context.user_auth - if auth_context and hasattr(auth_context, "user_auth") - else [], - "classifier_location": self.classifier_location, - } - - qa_payload = Qa(**qa) - self._send_prompt(qa_payload) + self.pb_client.send_prompt( + self.app_name, + self.retriever, + question, + answer, + auth_context, + docs, + prompt_entities, + prompt_time, + self.enable_prompt_gov, + ) if self.return_source_documents: return {self.output_key: answer, "source_documents": docs} @@ -187,7 +157,7 @@ async def _acall( "run_manager" in inspect.signature(self._aget_docs).parameters ) - _, prompt_entities = self._check_prompt_validity(question) + _, prompt_entities = self.pb_client.check_prompt_validity(question) if accepts_run_manager: docs = await self._aget_docs( @@ -243,7 +213,7 @@ def from_chain_type( chain_type: str = "stuff", chain_type_kwargs: Optional[dict] = None, api_key: Optional[str] = None, - classifier_url: str = CLASSIFIER_URL, + classifier_url: Optional[str] = None, classifier_location: str = "local", **kwargs: Any, ) -> "PebbloRetrievalQA": @@ -263,14 +233,14 @@ def from_chain_type( llm=llm, **kwargs, ) - - PebbloRetrievalQA._send_discover( - app, + # initialize Pebblo API client + pb_client = PebbloRetrievalAPIWrapper( api_key=api_key, - classifier_url=classifier_url, classifier_location=classifier_location, + classifier_url=classifier_url, ) - + # send app discovery request + pb_client.send_app_discover(app) return cls( 
combine_documents_chain=combine_documents_chain, app_name=app_name, @@ -279,6 +249,7 @@ def from_chain_type( api_key=api_key, classifier_url=classifier_url, classifier_location=classifier_location, + pb_client=pb_client, **kwargs, ) @@ -346,259 +317,36 @@ def _get_app_details( # type: ignore ) return app - @staticmethod - def _send_discover( - app: App, - api_key: Optional[str], - classifier_url: str, - classifier_location: str, - ) -> None: # type: ignore - """Send app discovery payload to pebblo-server. Internal method.""" - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - payload = app.dict(exclude_unset=True) - if classifier_location == "local": - app_discover_url = f"{classifier_url}{APP_DISCOVER_URL}" - try: - pebblo_resp = requests.post( - app_discover_url, headers=headers, json=payload, timeout=20 - ) - logger.debug("discover-payload: %s", payload) - logger.debug( - "send_discover[local]: request url %s, body %s len %s\ - response status %s body %s", - pebblo_resp.request.url, - str(pebblo_resp.request.body), - str( - len( - pebblo_resp.request.body if pebblo_resp.request.body else [] - ) - ), - str(pebblo_resp.status_code), - pebblo_resp.json(), - ) - if pebblo_resp.status_code in [HTTPStatus.OK, HTTPStatus.BAD_GATEWAY]: - PebbloRetrievalQA.set_discover_sent() - else: - logger.warning( - "Received unexpected HTTP response code:" - + f"{pebblo_resp.status_code}" - ) - except requests.exceptions.RequestException: - logger.warning("Unable to reach pebblo server.") - except Exception as e: - logger.warning("An Exception caught in _send_discover: local %s", e) - - if api_key: - try: - headers.update({"x-api-key": api_key}) - pebblo_cloud_url = f"{PEBBLO_CLOUD_URL}{APP_DISCOVER_URL}" - pebblo_cloud_response = requests.post( - pebblo_cloud_url, headers=headers, json=payload, timeout=20 - ) - - logger.debug( - "send_discover[cloud]: request url %s, body %s len %s\ - response status %s body %s", - pebblo_cloud_response.request.url, - str(pebblo_cloud_response.request.body), - str( - len( - pebblo_cloud_response.request.body - if pebblo_cloud_response.request.body - else [] - ) - ), - str(pebblo_cloud_response.status_code), - pebblo_cloud_response.json(), - ) - except requests.exceptions.RequestException: - logger.warning("Unable to reach Pebblo cloud server.") - except Exception as e: - logger.warning("An Exception caught in _send_discover: cloud %s", e) - @classmethod def set_discover_sent(cls) -> None: cls._discover_sent = True @classmethod - def set_prompt_sent(cls, value: bool = True) -> None: - cls._prompt_sent = value - - def _send_prompt(self, qa_payload: Qa) -> None: - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - app_discover_url = f"{self.classifier_url}{PROMPT_URL}" - pebblo_resp = None - payload = qa_payload.dict(exclude_unset=True) - if self.classifier_location == "local": - try: - pebblo_resp = requests.post( - app_discover_url, - headers=headers, - json=payload, - timeout=20, - ) - logger.debug("prompt-payload: %s", payload) - logger.debug( - "send_prompt[local]: request url %s, body %s len %s\ - response status %s body %s", - pebblo_resp.request.url, - str(pebblo_resp.request.body), - str( - len( - pebblo_resp.request.body if pebblo_resp.request.body else [] - ) - ), - str(pebblo_resp.status_code), - pebblo_resp.json(), - ) - if pebblo_resp.status_code in [HTTPStatus.OK, HTTPStatus.BAD_GATEWAY]: - PebbloRetrievalQA.set_prompt_sent() - else: - logger.warning( - "Received unexpected HTTP response 
code:" - + f"{pebblo_resp.status_code}" - ) - except requests.exceptions.RequestException: - logger.warning("Unable to reach pebblo server.") - except Exception as e: - logger.warning("An Exception caught in _send_discover: local %s", e) - - # If classifier location is local, then response, context and prompt - # should be fetched from pebblo_resp and replaced in payload. - if self.api_key: - if self.classifier_location == "local": - if pebblo_resp: - resp = json.loads(pebblo_resp.text) - if resp: - payload["response"].update( - resp.get("retrieval_data", {}).get("response", {}) - ) - payload["response"].pop("data") - payload["prompt"].update( - resp.get("retrieval_data", {}).get("prompt", {}) - ) - payload["prompt"].pop("data") - context = payload["context"] - for context_data in context: - context_data.pop("doc") - payload["context"] = context - else: - payload["response"] = {} - payload["prompt"] = {} - payload["context"] = [] - headers.update({"x-api-key": self.api_key}) - pebblo_cloud_url = f"{PEBBLO_CLOUD_URL}{PROMPT_URL}" - try: - pebblo_cloud_response = requests.post( - pebblo_cloud_url, - headers=headers, - json=payload, - timeout=20, - ) - - logger.debug( - "send_prompt[cloud]: request url %s, body %s len %s\ - response status %s body %s", - pebblo_cloud_response.request.url, - str(pebblo_cloud_response.request.body), - str( - len( - pebblo_cloud_response.request.body - if pebblo_cloud_response.request.body - else [] - ) - ), - str(pebblo_cloud_response.status_code), - pebblo_cloud_response.json(), - ) - except requests.exceptions.RequestException: - logger.warning("Unable to reach Pebblo cloud server.") - except Exception as e: - logger.warning("An Exception caught in _send_prompt: cloud %s", e) - elif self.classifier_location == "pebblo-cloud": - logger.warning("API key is missing for sending prompt to Pebblo cloud.") - raise NameError("API key is missing for sending prompt to Pebblo cloud.") - - def _check_prompt_validity(self, question: str) -> Tuple[bool, Dict[str, Any]]: + def get_chain_details( + cls, llm: BaseLanguageModel, **kwargs: Any + ) -> List[ChainInfo]: """ - Check the validity of the given prompt using a remote classification service. - - This method sends a prompt to a remote classifier service and return entities - present in prompt or not. + Get chain details. Args: - question (str): The prompt question to be validated. + llm (BaseLanguageModel): Language model instance. + **kwargs: Additional keyword arguments. Returns: - bool: True if the prompt is valid (does not contain deny list entities), - False otherwise. - dict: The entities present in the prompt + List[ChainInfo]: Chain details. 
""" - - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - prompt_payload = {"prompt": question} - is_valid_prompt: bool = True - prompt_gov_api_url = f"{self.classifier_url}{PROMPT_GOV_URL}" - pebblo_resp = None - prompt_entities: dict = {"entities": {}, "entityCount": 0} - if self.classifier_location == "local": - try: - pebblo_resp = requests.post( - prompt_gov_api_url, - headers=headers, - json=prompt_payload, - timeout=20, - ) - - logger.debug("prompt-payload: %s", prompt_payload) - logger.debug( - "send_prompt[local]: request url %s, body %s len %s\ - response status %s body %s", - pebblo_resp.request.url, - str(pebblo_resp.request.body), - str( - len( - pebblo_resp.request.body if pebblo_resp.request.body else [] - ) - ), - str(pebblo_resp.status_code), - pebblo_resp.json(), - ) - logger.debug(f"pebblo_resp.json() {pebblo_resp.json()}") - prompt_entities["entities"] = pebblo_resp.json().get("entities", {}) - prompt_entities["entityCount"] = pebblo_resp.json().get( - "entityCount", 0 - ) - - except requests.exceptions.RequestException: - logger.warning("Unable to reach pebblo server.") - except Exception as e: - logger.warning("An Exception caught in _send_discover: local %s", e) - return is_valid_prompt, prompt_entities - - @classmethod - def get_chain_details(cls, llm: BaseLanguageModel, **kwargs): # type: ignore llm_dict = llm.__dict__ - chain = [ - { - "name": cls.__name__, - "model": { - "name": llm_dict.get("model_name", llm_dict.get("model")), - "vendor": llm.__class__.__name__, - }, - "vector_dbs": [ - { - "name": kwargs["retriever"].vectorstore.__class__.__name__, - "embedding_model": str( + chains = [ + ChainInfo( + name=cls.__name__, + model=Model( + name=llm_dict.get("model_name", llm_dict.get("model")), + vendor=llm.__class__.__name__, + ), + vector_dbs=[ + VectorDB( + name=kwargs["retriever"].vectorstore.__class__.__name__, + embedding_model=str( kwargs["retriever"].vectorstore._embeddings.model ) if hasattr(kwargs["retriever"].vectorstore, "_embeddings") @@ -607,8 +355,8 @@ def get_chain_details(cls, llm: BaseLanguageModel, **kwargs): # type: ignore if hasattr(kwargs["retriever"].vectorstore, "_embedding") else None ), - } + ) ], - }, + ), ] - return chain + return chains diff --git a/libs/community/langchain_community/chains/pebblo_retrieval/models.py b/libs/community/langchain_community/chains/pebblo_retrieval/models.py index e4fd7c64963ef..d5693404214b2 100644 --- a/libs/community/langchain_community/chains/pebblo_retrieval/models.py +++ b/libs/community/langchain_community/chains/pebblo_retrieval/models.py @@ -109,7 +109,7 @@ class VectorDB(BaseModel): embedding_model: Optional[str] = None -class Chains(BaseModel): +class ChainInfo(BaseModel): name: str model: Optional[Model] vector_dbs: Optional[List[VectorDB]] @@ -121,7 +121,7 @@ class App(BaseModel): description: Optional[str] runtime: Runtime framework: Framework - chains: List[Chains] + chains: List[ChainInfo] plugin_version: str @@ -134,9 +134,9 @@ class Context(BaseModel): class Prompt(BaseModel): data: Optional[Union[list, str]] - entityCount: Optional[int] - entities: Optional[dict] - prompt_gov_enabled: Optional[bool] + entityCount: Optional[int] = None + entities: Optional[dict] = None + prompt_gov_enabled: Optional[bool] = None class Qa(BaseModel): diff --git a/libs/community/langchain_community/chains/pebblo_retrieval/utilities.py b/libs/community/langchain_community/chains/pebblo_retrieval/utilities.py index 86218ad07b0e2..568fc560c0f91 100644 --- 
a/libs/community/langchain_community/chains/pebblo_retrieval/utilities.py +++ b/libs/community/langchain_community/chains/pebblo_retrieval/utilities.py @@ -1,22 +1,43 @@ +import json import logging import os import platform -from typing import Tuple +from enum import Enum +from http import HTTPStatus +from typing import Any, Dict, List, Optional, Tuple +from langchain_core.documents import Document from langchain_core.env import get_runtime_environment +from langchain_core.pydantic_v1 import BaseModel +from langchain_core.utils import get_from_dict_or_env +from langchain_core.vectorstores import VectorStoreRetriever +from requests import Response, request +from requests.exceptions import RequestException -from langchain_community.chains.pebblo_retrieval.models import Framework, Runtime +from langchain_community.chains.pebblo_retrieval.models import ( + App, + AuthContext, + Context, + Framework, + Prompt, + Qa, + Runtime, +) logger = logging.getLogger(__name__) PLUGIN_VERSION = "0.1.1" -CLASSIFIER_URL = os.getenv("PEBBLO_CLASSIFIER_URL", "http://localhost:8000") -PEBBLO_CLOUD_URL = os.getenv("PEBBLO_CLOUD_URL", "https://api.daxa.ai") +_DEFAULT_CLASSIFIER_URL = "http://localhost:8000" +_DEFAULT_PEBBLO_CLOUD_URL = "https://api.daxa.ai" -PROMPT_URL = "/v1/prompt" -PROMPT_GOV_URL = "/v1/prompt/governance" -APP_DISCOVER_URL = "/v1/app/discover" + +class Routes(str, Enum): + """Routes available for the Pebblo API as enumerator.""" + + retrieval_app_discover = "/v1/app/discover" + prompt = "/v1/prompt" + prompt_governance = "/v1/prompt/governance" def get_runtime() -> Tuple[Framework, Runtime]: @@ -64,3 +85,308 @@ def get_ip() -> str: except Exception: public_ip = socket.gethostbyname("localhost") return public_ip + + +class PebbloRetrievalAPIWrapper(BaseModel): + """Wrapper for Pebblo Retrieval API.""" + + api_key: Optional[str] # Use SecretStr + """API key for Pebblo Cloud""" + classifier_location: str = "local" + """Location of the classifier, local or cloud. Defaults to 'local'""" + classifier_url: Optional[str] + """URL of the Pebblo Classifier""" + cloud_url: Optional[str] + """URL of the Pebblo Cloud""" + + def __init__(self, **kwargs: Any): + """Validate that api key in environment.""" + kwargs["api_key"] = get_from_dict_or_env( + kwargs, "api_key", "PEBBLO_API_KEY", "" + ) + kwargs["classifier_url"] = get_from_dict_or_env( + kwargs, "classifier_url", "PEBBLO_CLASSIFIER_URL", _DEFAULT_CLASSIFIER_URL + ) + kwargs["cloud_url"] = get_from_dict_or_env( + kwargs, "cloud_url", "PEBBLO_CLOUD_URL", _DEFAULT_PEBBLO_CLOUD_URL + ) + super().__init__(**kwargs) + + def send_app_discover(self, app: App) -> None: + """ + Send app discovery request to Pebblo server & cloud. + + Args: + app (App): App instance to be discovered. 
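+
+        Note:
+            With ``classifier_location`` set to ``"local"``, the discovery
+            payload is posted to ``{classifier_url}/v1/app/discover``. When an
+            API key is configured, the payload is also forwarded to Pebblo
+            cloud, augmented with the ``pebblo_server_version`` reported by
+            the local classifier (if any) and the client's ``PLUGIN_VERSION``.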
+ """ + pebblo_resp = None + payload = app.dict(exclude_unset=True) + + if self.classifier_location == "local": + # Send app details to local classifier + headers = self._make_headers() + app_discover_url = f"{self.classifier_url}{Routes.retrieval_app_discover}" + pebblo_resp = self.make_request("POST", app_discover_url, headers, payload) + + if self.api_key: + # Send app details to Pebblo cloud if api_key is present + headers = self._make_headers(cloud_request=True) + if pebblo_resp: + pebblo_server_version = json.loads(pebblo_resp.text).get( + "pebblo_server_version" + ) + payload.update({"pebblo_server_version": pebblo_server_version}) + + payload.update({"pebblo_client_version": PLUGIN_VERSION}) + pebblo_cloud_url = f"{self.cloud_url}{Routes.retrieval_app_discover}" + _ = self.make_request("POST", pebblo_cloud_url, headers, payload) + + def send_prompt( + self, + app_name: str, + retriever: VectorStoreRetriever, + question: str, + answer: str, + auth_context: Optional[AuthContext], + docs: List[Document], + prompt_entities: Dict[str, Any], + prompt_time: str, + prompt_gov_enabled: bool = False, + ) -> None: + """ + Send prompt to Pebblo server for classification. + Then send prompt to Daxa cloud(If api_key is present). + + Args: + app_name (str): Name of the app. + retriever (VectorStoreRetriever): Retriever instance. + question (str): Question asked in the prompt. + answer (str): Answer generated by the model. + auth_context (Optional[AuthContext]): Authentication context. + docs (List[Document]): List of documents retrieved. + prompt_entities (Dict[str, Any]): Entities present in the prompt. + prompt_time (str): Time when the prompt was generated. + prompt_gov_enabled (bool): Whether prompt governance is enabled. + """ + pebblo_resp = None + payload = self.build_prompt_qa_payload( + app_name, + retriever, + question, + answer, + auth_context, + docs, + prompt_entities, + prompt_time, + prompt_gov_enabled, + ) + + if self.classifier_location == "local": + # Send prompt to local classifier + headers = self._make_headers() + prompt_url = f"{self.classifier_url}{Routes.prompt}" + pebblo_resp = self.make_request("POST", prompt_url, headers, payload) + + if self.api_key: + # Send prompt to Pebblo cloud if api_key is present + if self.classifier_location == "local": + # If classifier location is local, then response, context and prompt + # should be fetched from pebblo_resp and replaced in payload. + pebblo_resp = pebblo_resp.json() if pebblo_resp else None + self.update_cloud_payload(payload, pebblo_resp) + + headers = self._make_headers(cloud_request=True) + pebblo_cloud_prompt_url = f"{self.cloud_url}{Routes.prompt}" + _ = self.make_request("POST", pebblo_cloud_prompt_url, headers, payload) + elif self.classifier_location == "pebblo-cloud": + logger.warning("API key is missing for sending prompt to Pebblo cloud.") + raise NameError("API key is missing for sending prompt to Pebblo cloud.") + + def check_prompt_validity(self, question: str) -> Tuple[bool, Dict[str, Any]]: + """ + Check the validity of the given prompt using a remote classification service. + + This method sends a prompt to a remote classifier service and return entities + present in prompt or not. + + Args: + question (str): The prompt question to be validated. + + Returns: + bool: True if the prompt is valid (does not contain deny list entities), + False otherwise. 
+ dict: The entities present in the prompt + """ + prompt_payload = {"prompt": question} + prompt_entities: dict = {"entities": {}, "entityCount": 0} + is_valid_prompt: bool = True + if self.classifier_location == "local": + headers = self._make_headers() + prompt_gov_api_url = f"{self.classifier_url}{Routes.prompt_governance}" + pebblo_resp = self.make_request( + "POST", prompt_gov_api_url, headers, prompt_payload + ) + if pebblo_resp: + logger.debug(f"pebblo_resp.json() {pebblo_resp.json()}") + prompt_entities["entities"] = pebblo_resp.json().get("entities", {}) + prompt_entities["entityCount"] = pebblo_resp.json().get( + "entityCount", 0 + ) + return is_valid_prompt, prompt_entities + + def _make_headers(self, cloud_request: bool = False) -> dict: + """ + Generate headers for the request. + + args: + cloud_request (bool): flag indicating whether the request is for Pebblo + cloud. + returns: + dict: Headers for the request. + + """ + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + if cloud_request: + # Add API key for Pebblo cloud request + if self.api_key: + headers.update({"x-api-key": self.api_key}) + else: + logger.warning("API key is missing for Pebblo cloud request.") + return headers + + @staticmethod + def make_request( + method: str, + url: str, + headers: dict, + payload: Optional[dict] = None, + timeout: int = 20, + ) -> Optional[Response]: + """ + Make a request to the Pebblo server/cloud API. + + Args: + method (str): HTTP method (GET, POST, PUT, DELETE, etc.). + url (str): URL for the request. + headers (dict): Headers for the request. + payload (Optional[dict]): Payload for the request (for POST, PUT, etc.). + timeout (int): Timeout for the request in seconds. + + Returns: + Optional[Response]: Response object if the request is successful. + """ + try: + response = request( + method=method, url=url, headers=headers, json=payload, timeout=timeout + ) + logger.debug( + "Request: method %s, url %s, len %s response status %s", + method, + response.request.url, + str(len(response.request.body if response.request.body else [])), + str(response.status_code), + ) + + if response.status_code >= HTTPStatus.INTERNAL_SERVER_ERROR: + logger.warning(f"Pebblo Server: Error {response.status_code}") + elif response.status_code >= HTTPStatus.BAD_REQUEST: + logger.warning(f"Pebblo received an invalid payload: {response.text}") + elif response.status_code != HTTPStatus.OK: + logger.warning( + f"Pebblo returned an unexpected response code: " + f"{response.status_code}" + ) + + return response + except RequestException: + logger.warning("Unable to reach server %s", url) + except Exception as e: + logger.warning("An Exception caught in make_request: %s", e) + return None + + @staticmethod + def update_cloud_payload(payload: dict, pebblo_resp: Optional[dict]) -> None: + """ + Update the payload with response, prompt and context from Pebblo response. + + Args: + payload (dict): Payload to be updated. + pebblo_resp (Optional[dict]): Response from Pebblo server. 
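+
+        Note:
+            When a server response is available, ``response`` and ``prompt``
+            are overwritten from its ``retrieval_data`` and their raw ``data``
+            fields are dropped, and every context entry loses its ``doc``
+            content. Without a response, ``response``, ``prompt`` and
+            ``context`` are emptied before the payload goes to Pebblo cloud.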
+ """ + if pebblo_resp: + # Update response, prompt and context from pebblo response + response = payload.get("response", {}) + response.update(pebblo_resp.get("retrieval_data", {}).get("response", {})) + response.pop("data", None) + prompt = payload.get("prompt", {}) + prompt.update(pebblo_resp.get("retrieval_data", {}).get("prompt", {})) + prompt.pop("data", None) + context = payload.get("context", []) + for context_data in context: + context_data.pop("doc", None) + else: + payload["response"] = {} + payload["prompt"] = {} + payload["context"] = [] + + def build_prompt_qa_payload( + self, + app_name: str, + retriever: VectorStoreRetriever, + question: str, + answer: str, + auth_context: Optional[AuthContext], + docs: List[Document], + prompt_entities: Dict[str, Any], + prompt_time: str, + prompt_gov_enabled: bool = False, + ) -> dict: + """ + Build the QA payload for the prompt. + + Args: + app_name (str): Name of the app. + retriever (VectorStoreRetriever): Retriever instance. + question (str): Question asked in the prompt. + answer (str): Answer generated by the model. + auth_context (Optional[AuthContext]): Authentication context. + docs (List[Document]): List of documents retrieved. + prompt_entities (Dict[str, Any]): Entities present in the prompt. + prompt_time (str): Time when the prompt was generated. + prompt_gov_enabled (bool): Whether prompt governance is enabled. + + Returns: + dict: The QA payload for the prompt. + """ + qa = Qa( + name=app_name, + context=[ + Context( + retrieved_from=doc.metadata.get( + "full_path", doc.metadata.get("source") + ), + doc=doc.page_content, + vector_db=retriever.vectorstore.__class__.__name__, + pb_checksum=doc.metadata.get("pb_checksum"), + ) + for doc in docs + if isinstance(doc, Document) + ], + prompt=Prompt( + data=question, + entities=prompt_entities.get("entities", {}), + entityCount=prompt_entities.get("entityCount", 0), + prompt_gov_enabled=prompt_gov_enabled, + ), + response=Prompt(data=answer), + prompt_time=prompt_time, + user=auth_context.user_id if auth_context else "unknown", + user_identities=auth_context.user_auth + if auth_context and hasattr(auth_context, "user_auth") + else [], + classifier_location=self.classifier_location, + ) + return qa.dict(exclude_unset=True) From 29c873dd695978f6a4a30b2eb65ddf98a2bd82c4 Mon Sep 17 00:00:00 2001 From: Brian Sam-Bodden Date: Thu, 22 Aug 2024 08:53:02 -0700 Subject: [PATCH 62/80] [docs]: update Redis (langchain-redis) documentation notebooks (vectorstore, llm caching, chat message history) (#25113) - **Description:** Adds notebooks for Redis Partner Package (langchain-redis) - **Issue:** N/A - **Dependencies:** None - **Twitter handle:** `@bsbodden` and `@redis` --------- Co-authored-by: Chester Curme --- .../caches/redis_llm_caching.ipynb | 424 +++++++ docs/docs/integrations/llm_caching.ipynb | 4 +- .../memory/redis_chat_message_history.ipynb | 316 +++-- .../integrations/vectorstores/redis.ipynb | 1073 ++++++++++------- 4 files changed, 1292 insertions(+), 525 deletions(-) create mode 100644 docs/docs/integrations/caches/redis_llm_caching.ipynb diff --git a/docs/docs/integrations/caches/redis_llm_caching.ipynb b/docs/docs/integrations/caches/redis_llm_caching.ipynb new file mode 100644 index 0000000000000..d30cd8241d94e --- /dev/null +++ b/docs/docs/integrations/caches/redis_llm_caching.ipynb @@ -0,0 +1,424 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Redis Cache for LangChain\n", + "\n", + "This notebook demonstrates how to use the 
`RedisCache` and `RedisSemanticCache` classes from the langchain-redis package to implement caching for LLM responses." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Setup\n", + "\n", + "First, let's install the required dependencies and ensure we have a Redis instance running." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "%pip install -U langchain-core langchain-redis langchain-openai redis" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Ensure you have a Redis server running. You can start one using Docker with:\n", + "\n", + "```\n", + "docker run -d -p 6379:6379 redis:latest\n", + "```\n", + "\n", + "Or install and run Redis locally according to your operating system's instructions." + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Connecting to Redis at: redis://redis:6379\n" + ] + } + ], + "source": [ + "import os\n", + "\n", + "# Use the environment variable if set, otherwise default to localhost\n", + "REDIS_URL = os.getenv(\"REDIS_URL\", \"redis://localhost:6379\")\n", + "print(f\"Connecting to Redis at: {REDIS_URL}\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Importing Required Libraries" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "import time\n", + "\n", + "from langchain.globals import set_llm_cache\n", + "from langchain.schema import Generation\n", + "from langchain_openai import OpenAI, OpenAIEmbeddings\n", + "from langchain_redis import RedisCache, RedisSemanticCache" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "import langchain_core\n", + "import langchain_openai\n", + "import openai\n", + "import redis" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Set OpenAI API key" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "OpenAI API key not found in environment variables.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Please enter your OpenAI API key: ········\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "OpenAI API key has been set for this session.\n" + ] + } + ], + "source": [ + "from getpass import getpass\n", + "\n", + "# Check if OPENAI_API_KEY is already set in the environment\n", + "openai_api_key = os.getenv(\"OPENAI_API_KEY\")\n", + "\n", + "if not openai_api_key:\n", + " print(\"OpenAI API key not found in environment variables.\")\n", + " openai_api_key = getpass(\"Please enter your OpenAI API key: \")\n", + "\n", + " # Set the API key for the current session\n", + " os.environ[\"OPENAI_API_KEY\"] = openai_api_key\n", + " print(\"OpenAI API key has been set for this session.\")\n", + "else:\n", + " print(\"OpenAI API key found in environment variables.\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Using RedisCache" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "First call (not cached):\n", + "Result: \n", + "\n", + "Caching is the process of storing frequently accessed data in a temporary storage location for faster 
retrieval. This helps to reduce the time and resources needed to access the data from its original source. Caching is commonly used in computer systems, web browsers, and databases to improve performance and efficiency.\n", + "Time: 1.16 seconds\n", + "\n", + "Second call (cached):\n", + "Result: \n", + "\n", + "Caching is the process of storing frequently accessed data in a temporary storage location for faster retrieval. This helps to reduce the time and resources needed to access the data from its original source. Caching is commonly used in computer systems, web browsers, and databases to improve performance and efficiency.\n", + "Time: 0.05 seconds\n", + "\n", + "Speed improvement: 25.40x faster\n", + "Cache cleared\n" + ] + } + ], + "source": [ + "# Initialize RedisCache\n", + "redis_cache = RedisCache(redis_url=REDIS_URL)\n", + "\n", + "# Set the cache for LangChain to use\n", + "set_llm_cache(redis_cache)\n", + "\n", + "# Initialize the language model\n", + "llm = OpenAI(temperature=0)\n", + "\n", + "\n", + "# Function to measure execution time\n", + "def timed_completion(prompt):\n", + " start_time = time.time()\n", + " result = llm.invoke(prompt)\n", + " end_time = time.time()\n", + " return result, end_time - start_time\n", + "\n", + "\n", + "# First call (not cached)\n", + "prompt = \"Explain the concept of caching in three sentences.\"\n", + "result1, time1 = timed_completion(prompt)\n", + "print(f\"First call (not cached):\\nResult: {result1}\\nTime: {time1:.2f} seconds\\n\")\n", + "\n", + "# Second call (should be cached)\n", + "result2, time2 = timed_completion(prompt)\n", + "print(f\"Second call (cached):\\nResult: {result2}\\nTime: {time2:.2f} seconds\\n\")\n", + "\n", + "print(f\"Speed improvement: {time1 / time2:.2f}x faster\")\n", + "\n", + "# Clear the cache\n", + "redis_cache.clear()\n", + "print(\"Cache cleared\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Using RedisSemanticCache" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Original query:\n", + "Prompt: What is the capital of France?\n", + "Result: \n", + "\n", + "The capital of France is Paris.\n", + "Time: 1.52 seconds\n", + "\n", + "Similar query:\n", + "Prompt: Can you tell me the capital city of France?\n", + "Result: \n", + "\n", + "The capital of France is Paris.\n", + "Time: 0.29 seconds\n", + "\n", + "Speed improvement: 5.22x faster\n", + "Semantic cache cleared\n" + ] + } + ], + "source": [ + "# Initialize RedisSemanticCache\n", + "embeddings = OpenAIEmbeddings()\n", + "semantic_cache = RedisSemanticCache(\n", + " redis_url=REDIS_URL, embeddings=embeddings, distance_threshold=0.2\n", + ")\n", + "\n", + "# Set the cache for LangChain to use\n", + "set_llm_cache(semantic_cache)\n", + "\n", + "\n", + "# Function to test semantic cache\n", + "def test_semantic_cache(prompt):\n", + " start_time = time.time()\n", + " result = llm.invoke(prompt)\n", + " end_time = time.time()\n", + " return result, end_time - start_time\n", + "\n", + "\n", + "# Original query\n", + "original_prompt = \"What is the capital of France?\"\n", + "result1, time1 = test_semantic_cache(original_prompt)\n", + "print(\n", + " f\"Original query:\\nPrompt: {original_prompt}\\nResult: {result1}\\nTime: {time1:.2f} seconds\\n\"\n", + ")\n", + "\n", + "# Semantically similar query\n", + "similar_prompt = \"Can you tell me the capital city of France?\"\n", + "result2, time2 = 
test_semantic_cache(similar_prompt)\n", + "print(\n", + " f\"Similar query:\\nPrompt: {similar_prompt}\\nResult: {result2}\\nTime: {time2:.2f} seconds\\n\"\n", + ")\n", + "\n", + "print(f\"Speed improvement: {time1 / time2:.2f}x faster\")\n", + "\n", + "# Clear the semantic cache\n", + "semantic_cache.clear()\n", + "print(\"Semantic cache cleared\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Advanced Usage" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Custom TTL (Time-To-Live)" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Cached result: Cached response\n", + "Waiting for TTL to expire...\n", + "Result after TTL: Not found (expired)\n" + ] + } + ], + "source": [ + "# Initialize RedisCache with custom TTL\n", + "ttl_cache = RedisCache(redis_url=REDIS_URL, ttl=5) # 5 seconds TTL\n", + "\n", + "# Update a cache entry\n", + "ttl_cache.update(\"test_prompt\", \"test_llm\", [Generation(text=\"Cached response\")])\n", + "\n", + "# Retrieve the cached entry\n", + "cached_result = ttl_cache.lookup(\"test_prompt\", \"test_llm\")\n", + "print(f\"Cached result: {cached_result[0].text if cached_result else 'Not found'}\")\n", + "\n", + "# Wait for TTL to expire\n", + "print(\"Waiting for TTL to expire...\")\n", + "time.sleep(6)\n", + "\n", + "# Try to retrieve the expired entry\n", + "expired_result = ttl_cache.lookup(\"test_prompt\", \"test_llm\")\n", + "print(\n", + " f\"Result after TTL: {expired_result[0].text if expired_result else 'Not found (expired)'}\"\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Customizing RedisSemanticCache" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Original result: \n", + "\n", + "The largest planet in our solar system is Jupiter.\n", + "Similar query result: \n", + "\n", + "The largest planet in our solar system is Jupiter.\n" + ] + } + ], + "source": [ + "# Initialize RedisSemanticCache with custom settings\n", + "custom_semantic_cache = RedisSemanticCache(\n", + " redis_url=REDIS_URL,\n", + " embeddings=embeddings,\n", + " distance_threshold=0.1, # Stricter similarity threshold\n", + " ttl=3600, # 1 hour TTL\n", + " name=\"custom_cache\", # Custom cache name\n", + ")\n", + "\n", + "# Test the custom semantic cache\n", + "set_llm_cache(custom_semantic_cache)\n", + "\n", + "test_prompt = \"What's the largest planet in our solar system?\"\n", + "result, _ = test_semantic_cache(test_prompt)\n", + "print(f\"Original result: {result}\")\n", + "\n", + "# Try a slightly different query\n", + "similar_test_prompt = \"Which planet is the biggest in the solar system?\"\n", + "similar_result, _ = test_semantic_cache(similar_test_prompt)\n", + "print(f\"Similar query result: {similar_result}\")\n", + "\n", + "# Clean up\n", + "custom_semantic_cache.clear()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Conclusion\n", + "\n", + "This notebook demonstrated the usage of `RedisCache` and `RedisSemanticCache` from the langchain-redis package. These caching mechanisms can significantly improve the performance of LLM-based applications by reducing redundant API calls and leveraging semantic similarity for intelligent caching. 
The Redis-based implementation provides a fast, scalable, and flexible solution for caching in distributed systems." + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/docs/docs/integrations/llm_caching.ipynb b/docs/docs/integrations/llm_caching.ipynb index 72d058e296098..cac516c24bd0e 100644 --- a/docs/docs/integrations/llm_caching.ipynb +++ b/docs/docs/integrations/llm_caching.ipynb @@ -457,7 +457,9 @@ "tags": [] }, "source": [ - "## `Redis` Cache" + "## `Redis` Cache\n", + "\n", + "See the main [Redis cache docs](/docs/integrations/caches/redis_llm_caching/) for detail." ] }, { diff --git a/docs/docs/integrations/memory/redis_chat_message_history.ipynb b/docs/docs/integrations/memory/redis_chat_message_history.ipynb index 760cdc0d09662..3b23833b0fba2 100644 --- a/docs/docs/integrations/memory/redis_chat_message_history.ipynb +++ b/docs/docs/integrations/memory/redis_chat_message_history.ipynb @@ -2,171 +2,347 @@ "cells": [ { "cell_type": "markdown", - "id": "91c6a7ef", "metadata": {}, "source": [ - "# Redis\n", + "# Redis Chat Message History\n", "\n", - ">[Redis (Remote Dictionary Server)](https://en.wikipedia.org/wiki/Redis) is an open-source in-memory storage, used as a distributed, in-memory key–value database, cache and message broker, with optional durability. Because it holds all data in memory and because of its design, `Redis` offers low-latency reads and writes, making it particularly suitable for use cases that require a cache. Redis is the most popular NoSQL database, and one of the most popular databases overall.\n", + ">[Redis (Remote Dictionary Server)](https://en.wikipedia.org/wiki/Redis) is an open-source in-memory storage, used as a distributed, in-memory key–value database, cache and message broker, with optional durability. `Redis` offers low-latency reads and writes. Redis is the most popular NoSQL database, and one of the most popular databases overall.\n", "\n", - "This notebook goes over how to use `Redis` to store chat message history." + "This notebook demonstrates how to use the `RedisChatMessageHistory` class from the langchain-redis package to store and manage chat message history using Redis." ] }, { "cell_type": "markdown", - "id": "897a4682-f9fc-488b-98f3-ae2acad84600", "metadata": {}, "source": [ "## Setup\n", - "First we need to install dependencies, and start a redis instance using commands like: `redis-server`." + "\n", + "First, we need to install the required dependencies and ensure we have a Redis instance running." ] }, { "cell_type": "code", "execution_count": null, - "id": "cda8b56d-baf7-49a2-91a2-4d424a8519cb", "metadata": {}, "outputs": [], "source": [ - "pip install -U langchain-community redis" + "%pip install -qU langchain-redis langchain-openai redis" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Make sure you have a Redis server running. You can start one using Docker with the following command:\n", + "\n", + "```\n", + "docker run -d -p 6379:6379 redis:latest\n", + "```\n", + "\n", + "Or install and run Redis locally according to the instructions for your operating system." 
] }, { "cell_type": "code", - "execution_count": null, - "id": "b11090e7-284b-4ed2-9790-ce0d35638717", + "execution_count": 2, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Connecting to Redis at: redis://redis:6379\n" + ] + } + ], "source": [ - "from langchain_community.chat_message_histories import RedisChatMessageHistory" + "import os\n", + "\n", + "# Use the environment variable if set, otherwise default to localhost\n", + "REDIS_URL = os.getenv(\"REDIS_URL\", \"redis://localhost:6379\")\n", + "print(f\"Connecting to Redis at: {REDIS_URL}\")" ] }, { "cell_type": "markdown", - "id": "20b99474-75ea-422e-9809-fbdb9d103afc", "metadata": {}, "source": [ - "## Store and Retrieve Messages" + "## Importing Required Libraries" ] }, { "cell_type": "code", "execution_count": 3, - "id": "d15e3302", "metadata": {}, "outputs": [], "source": [ - "history = RedisChatMessageHistory(\"foo\", url=\"redis://localhost:6379\")\n", - "\n", - "history.add_user_message(\"hi!\")\n", - "\n", - "history.add_ai_message(\"whats up?\")" + "from langchain_core.chat_history import BaseChatMessageHistory\n", + "from langchain_core.messages import AIMessage, HumanMessage\n", + "from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder\n", + "from langchain_core.runnables.history import RunnableWithMessageHistory\n", + "from langchain_openai import ChatOpenAI\n", + "from langchain_redis import RedisChatMessageHistory" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Basic Usage of RedisChatMessageHistory" ] }, { "cell_type": "code", "execution_count": 4, - "id": "64fc465e", "metadata": {}, "outputs": [ { - "data": { - "text/plain": [ - "[HumanMessage(content='hi!'), AIMessage(content='whats up?')]" - ] - }, - "execution_count": 4, - "metadata": {}, - "output_type": "execute_result" + "name": "stdout", + "output_type": "stream", + "text": [ + "Chat History:\n", + "HumanMessage: Hello, AI assistant!\n", + "AIMessage: Hello! How can I assist you today?\n" + ] } ], "source": [ - "history.messages" + "# Initialize RedisChatMessageHistory\n", + "history = RedisChatMessageHistory(session_id=\"user_123\", redis_url=REDIS_URL)\n", + "\n", + "# Add messages to the history\n", + "history.add_user_message(\"Hello, AI assistant!\")\n", + "history.add_ai_message(\"Hello! 
How can I assist you today?\")\n", + "\n", + "# Retrieve messages\n", + "print(\"Chat History:\")\n", + "for message in history.messages:\n", + " print(f\"{type(message).__name__}: {message.content}\")" ] }, { "cell_type": "markdown", - "id": "465fdd8c-b093-4d19-a55a-30f3b646432b", "metadata": {}, "source": [ - "## Using in the Chains" + "## Using RedisChatMessageHistory with Language Models" ] }, { - "cell_type": "code", - "execution_count": null, - "id": "94d65d2f-e9bb-4b47-a86d-dd6b1b5e8247", + "cell_type": "markdown", "metadata": {}, - "outputs": [], "source": [ - "pip install -U langchain-openai" + "### Set OpenAI API key" ] }, { "cell_type": "code", "execution_count": 5, - "id": "ace3e7b2-5e3e-4966-b549-04952a6a9a09", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "OpenAI API key not found in environment variables.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Please enter your OpenAI API key: ········\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "OpenAI API key has been set for this session.\n" + ] + } + ], "source": [ - "from typing import Optional\n", + "from getpass import getpass\n", "\n", - "from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder\n", - "from langchain_core.runnables.history import RunnableWithMessageHistory\n", - "from langchain_openai import ChatOpenAI" + "# Check if OPENAI_API_KEY is already set in the environment\n", + "openai_api_key = os.getenv(\"OPENAI_API_KEY\")\n", + "\n", + "if not openai_api_key:\n", + " print(\"OpenAI API key not found in environment variables.\")\n", + " openai_api_key = getpass(\"Please enter your OpenAI API key: \")\n", + "\n", + " # Set the API key for the current session\n", + " os.environ[\"OPENAI_API_KEY\"] = openai_api_key\n", + " print(\"OpenAI API key has been set for this session.\")\n", + "else:\n", + " print(\"OpenAI API key found in environment variables.\")" ] }, { "cell_type": "code", "execution_count": 6, - "id": "5c1fba0d-d06a-4695-ba14-c42a3461ada1", "metadata": {}, "outputs": [ { - "data": { - "text/plain": [ - "AIMessage(content='Your name is Bob, as you mentioned earlier. Is there anything specific you would like assistance with, Bob?')" - ] - }, - "execution_count": 6, - "metadata": {}, - "output_type": "execute_result" + "name": "stdout", + "output_type": "stream", + "text": [ + "AI Response 1: Hello Alice! 
How can I assist you today?\n", + "AI Response 2: Your name is Alice.\n" + ] } ], "source": [ + "# Create a prompt template\n", "prompt = ChatPromptTemplate.from_messages(\n", " [\n", - " (\"system\", \"You're an assistant。\"),\n", + " (\"system\", \"You are a helpful AI assistant.\"),\n", " MessagesPlaceholder(variable_name=\"history\"),\n", - " (\"human\", \"{question}\"),\n", + " (\"human\", \"{input}\"),\n", " ]\n", ")\n", "\n", - "chain = prompt | ChatOpenAI()\n", + "# Initialize the language model\n", + "llm = ChatOpenAI()\n", + "\n", + "# Create the conversational chain\n", + "chain = prompt | llm\n", + "\n", + "\n", + "# Function to get or create a RedisChatMessageHistory instance\n", + "def get_redis_history(session_id: str) -> BaseChatMessageHistory:\n", + " return RedisChatMessageHistory(session_id, redis_url=REDIS_URL)\n", "\n", + "\n", + "# Create a runnable with message history\n", "chain_with_history = RunnableWithMessageHistory(\n", - " chain,\n", - " lambda session_id: RedisChatMessageHistory(\n", - " session_id, url=\"redis://localhost:6379\"\n", - " ),\n", - " input_messages_key=\"question\",\n", - " history_messages_key=\"history\",\n", + " chain, get_redis_history, input_messages_key=\"input\", history_messages_key=\"history\"\n", ")\n", "\n", - "config = {\"configurable\": {\"session_id\": \"foo\"}}\n", + "# Use the chain in a conversation\n", + "response1 = chain_with_history.invoke(\n", + " {\"input\": \"Hi, my name is Alice.\"},\n", + " config={\"configurable\": {\"session_id\": \"alice_123\"}},\n", + ")\n", + "print(\"AI Response 1:\", response1.content)\n", "\n", - "chain_with_history.invoke({\"question\": \"Hi! I'm bob\"}, config=config)\n", + "response2 = chain_with_history.invoke(\n", + " {\"input\": \"What's my name?\"}, config={\"configurable\": {\"session_id\": \"alice_123\"}}\n", + ")\n", + "print(\"AI Response 2:\", response2.content)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Advanced Features" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Custom Redis Configuration" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Custom History: [HumanMessage(content='This is a message with custom configuration.')]\n" + ] + } + ], + "source": [ + "# Initialize with custom Redis configuration\n", + "custom_history = RedisChatMessageHistory(\n", + " \"user_456\",\n", + " redis_url=REDIS_URL,\n", + " key_prefix=\"custom_prefix:\",\n", + " ttl=3600, # Set TTL to 1 hour\n", + " index_name=\"custom_index\",\n", + ")\n", "\n", - "chain_with_history.invoke({\"question\": \"Whats my name\"}, config=config)" + "custom_history.add_user_message(\"This is a message with custom configuration.\")\n", + "print(\"Custom History:\", custom_history.messages)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Searching Messages" ] }, { "cell_type": "code", - "execution_count": null, - "id": "76ce3f6b-f4c7-4d27-8031-60f7dd756695", + "execution_count": 8, "metadata": {}, - "outputs": [], - "source": [] + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Search Results:\n", + "human: Tell me about artificial intelligence....\n", + "ai: Artificial Intelligence (AI) is a branch of comput...\n" + ] + } + ], + "source": [ + "# Add more messages\n", + "history.add_user_message(\"Tell me about artificial intelligence.\")\n", + "history.add_ai_message(\n", + " 
\"Artificial Intelligence (AI) is a branch of computer science...\"\n", + ")\n", + "\n", + "# Search for messages containing a specific term\n", + "search_results = history.search_messages(\"artificial intelligence\")\n", + "print(\"Search Results:\")\n", + "for result in search_results:\n", + " print(f\"{result['type']}: {result['content'][:50]}...\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Clearing History" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Messages after clearing: []\n" + ] + } + ], + "source": [ + "# Clear the chat history\n", + "history.clear()\n", + "print(\"Messages after clearing:\", history.messages)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Conclusion\n", + "\n", + "This notebook demonstrated the key features of `RedisChatMessageHistory` from the langchain-redis package. It showed how to initialize and use the chat history, integrate it with language models, and utilize advanced features like custom configurations and message searching. Redis provides a fast and scalable solution for managing chat history in AI applications." + ] } ], "metadata": { @@ -185,9 +361,9 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.12" + "version": "3.11.9" } }, "nbformat": 4, - "nbformat_minor": 5 + "nbformat_minor": 4 } diff --git a/docs/docs/integrations/vectorstores/redis.ipynb b/docs/docs/integrations/vectorstores/redis.ipynb index 6230bff240acd..daf7040bc4c2d 100644 --- a/docs/docs/integrations/vectorstores/redis.ipynb +++ b/docs/docs/integrations/vectorstores/redis.ipynb @@ -1,21 +1,40 @@ { "cells": [ + { + "cell_type": "raw", + "id": "1957f5cb", + "metadata": {}, + "source": [ + "---\n", + "sidebar_label: Redis\n", + "---" + ] + }, { "cell_type": "markdown", + "id": "ef1f0986", "metadata": {}, "source": [ - "# Redis\n", + "# Redis Vector Store\n", "\n", - ">[Redis vector database](https://redis.io/docs/get-started/vector-database/) introduction and langchain integration guide.\n", + "This notebook covers how to get started with the Redis vector store.\n", "\n", - "## What is Redis?\n", + ">[Redis](https://redis.io/docs/stack/vectorsearch/) is a popular open-source, in-memory data structure store that can be used as a database, cache, message broker, and queue. It now includes vector similarity search capabilities, making it suitable for use as a vector store." + ] + }, + { + "cell_type": "markdown", + "id": "a717e10e-c8d7-41bc-9dd0-447d11f90b68", + "metadata": {}, + "source": [ + "### What is Redis?\n", "\n", - "Most developers from a web services background are familiar with `Redis`. At its core, `Redis` is an open-source key-value store that is used as a cache, message broker, and database. Developers choose `Redis` because it is fast, has a large ecosystem of client libraries, and has been deployed by major enterprises for years.\n", + "Most developers are familiar with `Redis`. At its core, `Redis` is a NoSQL Database in the key-value family that can used as a cache, message broker, stream processing and a primary database. 
Developers choose `Redis` because it is fast, has a large ecosystem of client libraries, and has been deployed by major enterprises for years.\n", "\n", "On top of these traditional use cases, `Redis` provides additional capabilities like the Search and Query capability that allows users to create secondary index structures within `Redis`. This allows `Redis` to be a Vector Database, at the speed of a cache. \n", "\n", "\n", - "## Redis as a Vector Database\n", + "### Redis as a Vector Database\n", "\n", "`Redis` uses compressed, inverted indexes for fast indexing with a low memory footprint. It also supports a number of advanced features such as:\n", "\n", @@ -38,9 +57,7 @@ "* Retrieve full documents, selected fields, or only the document IDs\n", "* Sorting results (for example, by creation date)\n", "\n", - "\n", - "\n", - "## Clients\n", + "### Clients\n", "\n", "Since `Redis` is much more than just a vector database, there are often use cases that demand the usage of a `Redis` client besides just the `LangChain` integration. You can use any standard `Redis` client library to run Search and Query commands, but it's easiest to use a library that wraps the Search and Query API. Below are a few examples, but you can find more client libraries [here](https://redis.io/resources/clients/).\n", "\n", @@ -54,8 +71,8 @@ "\n", "[redis-url]: https://redis.com\n", "\n", - "[redisvl-url]: https://github.com/RedisVentures/redisvl\n", - "[redisvl-stars]: https://img.shields.io/github/stars/RedisVentures/redisvl.svg?style=social&label=Star&maxAge=2592000\n", + "[redisvl-url]: https://github.com/redis/redis-vl-python\n", + "[redisvl-stars]: https://img.shields.io/github/stars/redis/redisvl.svg?style=social&label=Star&maxAge=2592000\n", "[redisvl-package]: https://pypi.python.org/pypi/redisvl\n", "\n", "[redis-py-url]: https://github.com/redis/redis-py\n", @@ -87,7 +104,7 @@ "[redisearch-api-rs-stars]: https://img.shields.io/github/stars/RediSearch/redisearch-api-rs.svg?style=social&label=Star&maxAge=2592000\n", "\n", "\n", - "## Deployment options\n", + "### Deployment options\n", "\n", "There are many ways to deploy Redis with RediSearch. The easiest way to get started is to use Docker, but there are are many potential options for deployment such as\n", "\n", @@ -96,80 +113,82 @@ "- Cloud marketplaces: [AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-e6y7ork67pjwg?sr=0-2&ref_=beagle&applicationId=AWSMPContessa), [Google Marketplace](https://console.cloud.google.com/marketplace/details/redislabs-public/redis-enterprise?pli=1), or [Azure Marketplace](https://azuremarketplace.microsoft.com/en-us/marketplace/apps/garantiadata.redis_enterprise_1sp_public_preview?tab=Overview)\n", "- On-premise: [Redis Enterprise Software](https://redis.com/redis-enterprise-software/overview/)\n", "- Kubernetes: [Redis Enterprise Software on Kubernetes](https://docs.redis.com/latest/kubernetes/)\n", + " \n", + "### Redis connection Url schemas\n", "\n", + "Valid Redis Url schemas are:\n", + "1. `redis://` - Connection to Redis standalone, unencrypted\n", + "2. `rediss://` - Connection to Redis standalone, with TLS encryption\n", + "3. `redis+sentinel://` - Connection to Redis server via Redis Sentinel, unencrypted\n", + "4. 
`rediss+sentinel://` - Connection to Redis server via Redis Sentinel, both connections with TLS encryption\n", "\n", - "## Additional examples\n", - "\n", - "Many examples can be found in the [Redis AI team's GitHub](https://github.com/RedisVentures/)\n", - "\n", - "- [Awesome Redis AI Resources](https://github.com/RedisVentures/redis-ai-resources) - List of examples of using Redis in AI workloads\n", - "- [Azure OpenAI Embeddings Q&A](https://github.com/ruoccofabrizio/azure-open-ai-embeddings-qna) - OpenAI and Redis as a Q&A service on Azure.\n", - "- [ArXiv Paper Search](https://github.com/RedisVentures/redis-arXiv-search) - Semantic search over arXiv scholarly papers\n", - "- [Vector Search on Azure](https://learn.microsoft.com/azure/azure-cache-for-redis/cache-tutorial-vector-similarity) - Vector search on Azure using Azure Cache for Redis and Azure OpenAI\n", - "\n", - "\n", - "## More resources\n", - "\n", - "For more information on how to use Redis as a vector database, check out the following resources:\n", - "\n", - "- [RedisVL Documentation](https://redisvl.com) - Documentation for the Redis Vector Library Client\n", - "- [Redis Vector Similarity Docs](https://redis.io/docs/stack/search/reference/vectors/) - Redis official docs for Vector Search.\n", - "- [Redis-py Search Docs](https://redis.readthedocs.io/en/latest/redismodules.html#redisearch-commands) - Documentation for redis-py client library\n", - "- [Vector Similarity Search: From Basics to Production](https://mlops.community/vector-similarity-search-from-basics-to-production/) - Introductory blog post to VSS and Redis as a VectorDB." + "More information about additional connection parameters can be found in the [redis-py documentation](https://redis-py.readthedocs.io/en/stable/connections.html)." ] }, { "cell_type": "markdown", + "id": "36fdc060", "metadata": {}, "source": [ "## Setup\n", "\n", - "`Redis-py` is the officially supported client by Redis. Recently released is the `RedisVL` client which is purpose-built for the Vector Database use cases. Both can be installed with pip." + "To use the RedisVectorStore, you'll need to install the `langchain-redis` partner package, as well as the other packages used throughout this notebook." ] }, { "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], + "execution_count": 1, + "id": "64e28aa6", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Note: you may need to restart the kernel to use updated packages.\n" + ] + } + ], "source": [ - "%pip install -qU redis redisvl langchain-community" + "%pip install -qU langchain-redis langchain-huggingface sentence-transformers scikit-learn" ] }, { "cell_type": "markdown", + "id": "37d388a3-1a56-453e-8f84-e24a72d682eb", "metadata": {}, "source": [ - "### Deploy Redis locally\n", + "### Credentials\n", "\n", - "To locally deploy Redis, run:\n", + "Redis connection credentials are passed as part of the Redis Connection URL. Redis Connection URLs are versatile and can accommodate various Redis server topologies and authentication methods. These URLs follow a specific format that includes the connection protocol, authentication details, host, port, and database information.\n", + "The basic structure of a Redis Connection URL is:\n", "\n", - "```console\n", - "docker run -d -p 6379:6379 -p 8001:8001 redis/redis-stack:latest\n", "```\n", - "If things are running correctly you should see a nice Redis UI at `http://localhost:8001`. 
See the [Deployment options](#deployment-options) section above for other ways to deploy." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Redis connection Url schemas\n", + "[protocol]://[auth]@[host]:[port]/[database]\n", + "```\n", "\n", - "Valid Redis Url schemas are:\n", - "1. `redis://` - Connection to Redis standalone, unencrypted\n", - "2. `rediss://` - Connection to Redis standalone, with TLS encryption\n", - "3. `redis+sentinel://` - Connection to Redis server via Redis Sentinel, unencrypted\n", - "4. `rediss+sentinel://` - Connection to Redis server via Redis Sentinel, booth connections with TLS encryption\n", + "Where:\n", "\n", - "More information about additional connection parameters can be found in the [redis-py documentation](https://redis-py.readthedocs.io/en/stable/connections.html)." + "* protocol can be redis for standard connections, rediss for SSL/TLS connections, or redis+sentinel for Sentinel connections.\n", + "* auth includes username and password (if applicable).\n", + "* host is the Redis server hostname or IP address.\n", + "* port is the Redis server port.\n", + "* database is the Redis database number.\n", + "\n", + "Redis Connection URLs support various configurations, including:\n", + "\n", + "* Standalone Redis servers (with or without authentication)\n", + "* Redis Sentinel setups\n", + "* SSL/TLS encrypted connections\n", + "* Different authentication methods (password-only or username-password)\n", + "\n", + "Below are examples of Redis Connection URLs for different configurations:" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 2, + "id": "b1b1eb90-5155-44ca-a8a7-b04b02d5e77c", "metadata": {}, "outputs": [], "source": [ @@ -196,14 +215,53 @@ }, { "cell_type": "markdown", + "id": "9695dee7", "metadata": {}, "source": [ - "If you want to get best in-class automated tracing of your model calls you can also set your [LangSmith](https://docs.smith.langchain.com/) API key by uncommenting below:" + "### Launching a Redis Instance with Docker\n", + "\n", + "To use Redis with LangChain, you need a running Redis instance. You can start one using Docker with:\n", + "\n", + "```bash\n", + "docker run -d -p 6379:6379 redis/redis-stack:latest\n", + "```\n", + "\n", + "For this example, we'll use a local Redis instance. If you're using a remote instance, you'll need to modify the Redis URL accordingly." 
] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 3, + "id": "894c30e4", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Connecting to Redis at: redis://redis:6379\n" + ] + } + ], + "source": [ + "import os\n", + "\n", + "REDIS_URL = os.getenv(\"REDIS_URL\", \"redis://localhost:6379\")\n", + "print(f\"Connecting to Redis at: {REDIS_URL}\")" + ] + }, + { + "cell_type": "markdown", + "id": "7f98392b", + "metadata": {}, + "source": [ + "If you want to get automated tracing of your model calls you can also set your [LangSmith](https://docs.smith.langchain.com/) API key by uncommenting below:" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "e7b6a6e0", "metadata": {}, "outputs": [], "source": [ @@ -213,19 +271,96 @@ }, { "cell_type": "markdown", + "id": "63dd7f8d-df13-45d8-8e13-892b29803e96", + "metadata": {}, + "source": [ + "Let's check that Redis is up an running by pinging it:" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "37fc2d36-f5bf-465f-9774-510bdc134b62", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "True" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "import redis\n", + "\n", + "redis_client = redis.from_url(REDIS_URL)\n", + "redis_client.ping()" + ] + }, + { + "cell_type": "markdown", + "id": "4e388814-4188-4f29-8f24-cc67d4048ebe", + "metadata": {}, + "source": [ + "### Sample Data\n", + "\n", + "The 20 newsgroups dataset comprises around 18000 newsgroups posts on 20 topics. We'll use a subset for this demonstration and focus on two categories: 'alt.atheism' and 'sci.space':" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "869a4726-1e24-48fd-9ffd-c62a589d0bb1", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "250" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from langchain.docstore.document import Document\n", + "from sklearn.datasets import fetch_20newsgroups\n", + "\n", + "categories = [\"alt.atheism\", \"sci.space\"]\n", + "newsgroups = fetch_20newsgroups(\n", + " subset=\"train\", categories=categories, shuffle=True, random_state=42\n", + ")\n", + "\n", + "# Use only the first 250 documents\n", + "texts = newsgroups.data[:250]\n", + "metadata = [\n", + " {\"category\": newsgroups.target_names[target]} for target in newsgroups.target[:250]\n", + "]\n", + "\n", + "len(texts)" + ] + }, + { + "cell_type": "markdown", + "id": "93df377e", "metadata": {}, "source": [ "## Initialization\n", "\n", - "The Redis VectorStore instance can be initialized in a number of ways. 
There are multiple class methods that can be used to initialize a Redis VectorStore instance.\n", + "The RedisVectorStore instance can be initialized in several ways:\n", "\n", - "- ``Redis.__init__`` - Initialize directly\n", - "- ``Redis.from_documents`` - Initialize from a list of ``Langchain.docstore.Document`` objects\n", - "- ``Redis.from_texts`` - Initialize from a list of texts (optionally with metadata)\n", - "- ``Redis.from_texts_return_keys`` - Initialize from a list of texts (optionally with metadata) and return the keys\n", - "- ``Redis.from_existing_index`` - Initialize from an existing Redis index\n", + "- `RedisVectorStore.__init__` - Initialize directly\n", + "- `RedisVectorStore.from_texts` - Initialize from a list of texts (optionally with metadata)\n", + "- `RedisVectorStore.from_documents` - Initialize from a list of `langchain_core.documents.Document` objects\n", + "- `RedisVectorStore.from_existing_index` - Initialize from an existing Redis index\n", "\n", - "Below we will use the ``Redis.__init__`` method. \n", + "Below we will use the `RedisVectorStore.__init__` method using a `RedisConfig` instance.\n", "\n", "```{=mdx}\n", "import EmbeddingTabs from \"@theme/EmbeddingTabs\";\n", @@ -236,146 +371,114 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": 7, + "id": "7a95c110-015b-4300-93b7-c0100d55d024", "metadata": {}, "outputs": [], "source": [ + "%%capture\n", "# | output: false\n", "# | echo: false\n", - "from langchain_openai import OpenAIEmbeddings\n", + "os.environ[\"TOKENIZERS_PARALLELISM\"] = \"false\"\n", + "from langchain_huggingface import HuggingFaceEmbeddings\n", + "from tqdm.auto import tqdm\n", "\n", - "embeddings = OpenAIEmbeddings(model=\"text-embedding-3-large\")" + "embeddings = HuggingFaceEmbeddings(model_name=\"msmarco-distilbert-base-v4\")" + ] + }, + { + "cell_type": "markdown", + "id": "ec110186-d5e4-4eaa-a4e3-899f405f719f", + "metadata": {}, + "source": [ + "We'll use the SentenceTransformer model to create embeddings. This model runs locally and doesn't require an API key." 
] }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 8, + "id": "dc37144c-208d-4ab3-9f3a-0407a69fe052", "metadata": { "tags": [] }, "outputs": [], "source": [ - "from langchain_community.vectorstores.redis import Redis\n", + "from langchain_redis import RedisConfig, RedisVectorStore\n", + "\n", + "config = RedisConfig(\n", + " index_name=\"newsgroups\",\n", + " redis_url=REDIS_URL,\n", + " metadata_schema=[\n", + " {\"name\": \"category\", \"type\": \"tag\"},\n", + " ],\n", + ")\n", "\n", - "vector_store = Redis(\n", - " redis_url=\"redis://localhost:6379\",\n", - " embedding=embeddings,\n", - " index_name=\"users\",\n", - ")" + "vector_store = RedisVectorStore(embeddings, config=config)" ] }, { "cell_type": "markdown", + "id": "ac6071d4", "metadata": {}, "source": [ "## Manage vector store\n", "\n", - "Once you have created your vector store, we can interact with it by adding and deleting different items.\n", - "\n", - "### Add items to vector store\n", + "### Add items to vector store" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "17f5efc0", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['newsgroups:f1e788ee61fe410daa8ef941dd166223', 'newsgroups:80b39032181f4299a359a9aaed6e2401', 'newsgroups:99a3efc1883647afba53d115b49e6e92', 'newsgroups:503a6c07cd71418eb71e11b42589efd7', 'newsgroups:7351210e32d1427bbb3c7426cf93a44f', 'newsgroups:4e79fdf67abe471b8ee98ba0e8a1a055', 'newsgroups:03559a1d574e4f9ca0479d7b3891402e', 'newsgroups:9a1c2a7879b8409a805db72feac03580', 'newsgroups:3578a1e129f5435f9743cf803413f37a', 'newsgroups:9f68baf4d6b04f1683d6b871ce8ad92d']\n" + ] + } + ], + "source": [ + "ids = vector_store.add_texts(texts, metadata)\n", "\n", - "We can add items to our vector store by using the `add_documents` function." + "print(ids[0:10])" + ] + }, + { + "cell_type": "markdown", + "id": "f8822e55-40d5-48aa-8e29-79101feb645a", + "metadata": {}, + "source": [ + "Let's inspect the first document:" ] }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 10, + "id": "ca27e394-ae1e-4fdb-b79a-4a6b45a953a8", "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "['doc:users:622f5f19-9b4b-4896-9a16-e1e95f19db4b',\n", - " 'doc:users:032b489f-d37e-4bf1-85ec-4c2275be48ef',\n", - " 'doc:users:5daf0855-b352-45bd-9d29-e21ff66e38c8',\n", - " 'doc:users:b9204897-190b-4dd9-af2b-081ed4e9cbb0',\n", - " 'doc:users:9395caff-1a6a-46c1-bc5c-7c5558eadf46',\n", - " 'doc:users:28243c3d-463d-4662-936e-003a2dc0dc30',\n", - " 'doc:users:1e1cdb91-c226-4836-b38e-ee4b61444913',\n", - " 'doc:users:4005bba2-2a08-4160-a16f-5cc3cf9d4aea',\n", - " 'doc:users:8c88440a-06d2-4a68-95f1-c58d0cf99d29',\n", - " 'doc:users:cc20438f-741a-40fd-bed8-4f1cee113680']" + "('From: bil@okcforum.osrhe.edu (Bill Conner)\\nSubject: Re: Not the Omni!\\nNntp-Posting-Host: okcforum.osrhe.edu\\nOrganization: Okcforum Unix Users Group\\nX-Newsreader: TIN [version 1.1 PL6]\\nLines: 18\\n\\nCharley Wingate (mangoe@cs.umd.edu) wrote:\\n: \\n: >> Please enlighten me. How is omnipotence contradictory?\\n: \\n: >By definition, all that can occur in the universe is governed by the rules\\n: >of nature. Thus god cannot break them. Anything that god does must be allowed\\n: >in the rules somewhere. Therefore, omnipotence CANNOT exist! It contradicts\\n: >the rules of nature.\\n: \\n: Obviously, an omnipotent god can change the rules.\\n\\nWhen you say, \"By definition\", what exactly is being defined;\\ncertainly not omnipotence. 
You seem to be saying that the \"rules of\\nnature\" are pre-existant somehow, that they not only define nature but\\nactually cause it. If that\\'s what you mean I\\'d like to hear your\\nfurther thoughts on the question.\\n\\nBill\\n',\n", + " {'category': 'alt.atheism'})" ] }, - "execution_count": 7, + "execution_count": 10, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "from uuid import uuid4\n", - "\n", - "from langchain_core.documents import Document\n", - "\n", - "document_1 = Document(\n", - " page_content=\"I had chocalate chip pancakes and scrambled eggs for breakfast this morning.\",\n", - " metadata={\"source\": \"tweet\"},\n", - ")\n", - "\n", - "document_2 = Document(\n", - " page_content=\"The weather forecast for tomorrow is cloudy and overcast, with a high of 62 degrees.\",\n", - " metadata={\"source\": \"news\"},\n", - ")\n", - "\n", - "document_3 = Document(\n", - " page_content=\"Building an exciting new project with LangChain - come check it out!\",\n", - " metadata={\"source\": \"tweet\"},\n", - ")\n", - "\n", - "document_4 = Document(\n", - " page_content=\"Robbers broke into the city bank and stole $1 million in cash.\",\n", - " metadata={\"source\": \"news\"},\n", - ")\n", - "\n", - "document_5 = Document(\n", - " page_content=\"Wow! That was an amazing movie. I can't wait to see it again.\",\n", - " metadata={\"source\": \"tweet\"},\n", - ")\n", - "\n", - "document_6 = Document(\n", - " page_content=\"Is the new iPhone worth the price? Read this review to find out.\",\n", - " metadata={\"source\": \"website\"},\n", - ")\n", - "\n", - "document_7 = Document(\n", - " page_content=\"The top 10 soccer players in the world right now.\",\n", - " metadata={\"source\": \"website\"},\n", - ")\n", - "\n", - "document_8 = Document(\n", - " page_content=\"LangGraph is the best framework for building stateful, agentic applications!\",\n", - " metadata={\"source\": \"tweet\"},\n", - ")\n", - "\n", - "document_9 = Document(\n", - " page_content=\"The stock market is down 500 points today due to fears of a recession.\",\n", - " metadata={\"source\": \"news\"},\n", - ")\n", - "\n", - "document_10 = Document(\n", - " page_content=\"I have a bad feeling I am going to get deleted :(\",\n", - " metadata={\"source\": \"tweet\"},\n", - ")\n", - "\n", - "documents = [\n", - " document_1,\n", - " document_2,\n", - " document_3,\n", - " document_4,\n", - " document_5,\n", - " document_6,\n", - " document_7,\n", - " document_8,\n", - " document_9,\n", - " document_10,\n", - "]\n", - "uuids = [str(uuid4()) for _ in range(len(documents))]\n", - "\n", - "vector_store.add_documents(documents=documents, ids=uuids)" + "texts[0], metadata[0]" ] }, { "cell_type": "markdown", + "id": "dcf1b905", "metadata": {}, "source": [ "### Delete items from vector store" @@ -383,26 +486,29 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 11, + "id": "ef61e188", "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "True" + "1" ] }, - "execution_count": 8, + "execution_count": 11, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "vector_store.delete(ids=[uuids[-1]])" + "# Delete documents by passing one or more keys/ids\n", + "vector_store.index.drop_keys(ids[0])" ] }, { "cell_type": "markdown", + "id": "021e2e3a-8f87-4d62-a1f3-dc291c6b26be", "metadata": {}, "source": [ "### Inspecting the created Index\n", @@ -412,15 +518,17 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 12, + "id": 
"787d9cbf-8942-4e6f-b030-f404d4632972", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "\u001b[32m17:24:03\u001b[0m \u001b[34m[RedisVL]\u001b[0m \u001b[1;30mINFO\u001b[0m Indices:\n", - "\u001b[32m17:24:03\u001b[0m \u001b[34m[RedisVL]\u001b[0m \u001b[1;30mINFO\u001b[0m 1. users\n" + "\u001b[32m17:54:50\u001b[0m \u001b[34m[RedisVL]\u001b[0m \u001b[1;30mINFO\u001b[0m Using Redis address from environment variable, REDIS_URL\n", + "\u001b[32m17:54:50\u001b[0m \u001b[34m[RedisVL]\u001b[0m \u001b[1;30mINFO\u001b[0m Indices:\n", + "\u001b[32m17:54:50\u001b[0m \u001b[34m[RedisVL]\u001b[0m \u001b[1;30mINFO\u001b[0m 1. newsgroups\n" ] } ], @@ -431,6 +539,7 @@ }, { "cell_type": "markdown", + "id": "b869de6f-e3da-4bfc-a267-102df1165521", "metadata": {}, "source": [ "The ``Redis`` VectorStore implementation will attempt to generate index schema (fields for filtering) for any metadata passed through the ``from_texts``, ``from_texts_return_keys``, and ``from_documents`` methods. This way, whatever metadata is passed will be indexed into the Redis search index allowing\n", @@ -441,85 +550,84 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 13, + "id": "0eb45eb1-492f-487d-a8a7-7d2d301c7bdb", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ + "\u001b[32m17:54:50\u001b[0m \u001b[34m[RedisVL]\u001b[0m \u001b[1;30mINFO\u001b[0m Using Redis address from environment variable, REDIS_URL\n", "\n", "\n", "Index Information:\n", - "╭──────────────┬────────────────┬───────────────┬─────────────────┬────────────╮\n", - "│ Index Name │ Storage Type │ Prefixes │ Index Options │ Indexing │\n", - "├──────────────┼────────────────┼───────────────┼─────────────────┼────────────┤\n", - "│ users │ HASH │ ['doc:users'] │ [] │ 0 │\n", - "╰──────────────┴────────────────┴───────────────┴─────────────────┴────────────╯\n", + "╭──────────────┬────────────────┬────────────────┬─────────────────┬────────────╮\n", + "│ Index Name │ Storage Type │ Prefixes │ Index Options │ Indexing │\n", + "├──────────────┼────────────────┼────────────────┼─────────────────┼────────────┤\n", + "│ newsgroups │ HASH │ ['newsgroups'] │ [] │ 0 │\n", + "╰──────────────┴────────────────┴────────────────┴─────────────────┴────────────╯\n", "Index Fields:\n", - "╭────────────────┬────────────────┬────────┬────────────────┬────────────────┬────────────────┬────────────────┬────────────────┬────────────────┬─────────────────┬────────────────╮\n", - "│ Name │ Attribute │ Type │ Field Option │ Option Value │ Field Option │ Option Value │ Field Option │ Option Value │ Field Option │ Option Value │\n", - "├────────────────┼────────────────┼────────┼────────────────┼────────────────┼────────────────┼────────────────┼────────────────┼────────────────┼─────────────────┼────────────────┤\n", - "│ content │ content │ TEXT │ WEIGHT │ 1 │ │ │ │ │ │ │\n", - "│ content_vector │ content_vector │ VECTOR │ algorithm │ FLAT │ data_type │ FLOAT32 │ dim │ 3072 │ distance_metric │ COSINE │\n", - "╰────────────────┴────────────────┴────────┴────────────────┴────────────────┴────────────────┴────────────────┴────────────────┴────────────────┴─────────────────┴────────────────╯\n" + "╭───────────┬─────────────┬────────┬────────────────┬────────────────┬────────────────┬────────────────┬────────────────┬────────────────┬─────────────────┬────────────────╮\n", + "│ Name │ Attribute │ Type │ Field Option │ Option Value │ Field Option │ Option Value │ Field Option │ Option Value │ Field 
Option │ Option Value │\n", + "├───────────┼─────────────┼────────┼────────────────┼────────────────┼────────────────┼────────────────┼────────────────┼────────────────┼─────────────────┼────────────────┤\n", + "│ text │ text │ TEXT │ WEIGHT │ 1 │ │ │ │ │ │ │\n", + "│ embedding │ embedding │ VECTOR │ algorithm │ FLAT │ data_type │ FLOAT32 │ dim │ 768 │ distance_metric │ COSINE │\n", + "│ category │ category │ TAG │ SEPARATOR │ | │ │ │ │ │ │ │\n", + "╰───────────┴─────────────┴────────┴────────────────┴────────────────┴────────────────┴────────────────┴────────────────┴────────────────┴─────────────────┴────────────────╯\n" ] } ], "source": [ - "!rvl index info -i users --port 6379" + "!rvl index info -i newsgroups --port 6379" ] }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 14, + "id": "84f9a77c-41b1-4515-97f4-2635998dc0dd", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ + "\u001b[32m17:54:51\u001b[0m \u001b[34m[RedisVL]\u001b[0m \u001b[1;30mINFO\u001b[0m Using Redis address from environment variable, REDIS_URL\n", "\n", "Statistics:\n", - "╭─────────────────────────────┬─────────────╮\n", - "│ Stat Key │ Value │\n", - "├─────────────────────────────┼─────────────┤\n", - "│ num_docs │ 10 │\n", - "│ num_terms │ 100 │\n", - "│ max_doc_id │ 10 │\n", - "│ num_records │ 116 │\n", - "│ percent_indexed │ 1 │\n", - "│ hash_indexing_failures │ 0 │\n", - "│ number_of_uses │ 1 │\n", - "│ bytes_per_record_avg │ 88.2931 │\n", - "│ doc_table_size_mb │ 0.00108719 │\n", - "│ inverted_sz_mb │ 0.00976753 │\n", - "│ key_table_size_mb │ 0.000304222 │\n", - "│ offset_bits_per_record_avg │ 8 │\n", - "│ offset_vectors_sz_mb │ 0.000102043 │\n", - "│ offsets_per_term_avg │ 0.922414 │\n", - "│ records_per_doc_avg │ 11.6 │\n", - "│ sortable_values_size_mb │ 0 │\n", - "│ total_indexing_time │ 1.373 │\n", - "│ total_inverted_index_blocks │ 100 │\n", - "│ vector_index_sz_mb │ 12.0086 │\n", - "╰─────────────────────────────┴─────────────╯\n" + "╭─────────────────────────────┬────────────╮\n", + "│ Stat Key │ Value │\n", + "├─────────────────────────────┼────────────┤\n", + "│ num_docs │ 249 │\n", + "│ num_terms │ 16178 │\n", + "│ max_doc_id │ 250 │\n", + "│ num_records │ 50394 │\n", + "│ percent_indexed │ 1 │\n", + "│ hash_indexing_failures │ 0 │\n", + "│ number_of_uses │ 2 │\n", + "│ bytes_per_record_avg │ 38.2743 │\n", + "│ doc_table_size_mb │ 0.0263586 │\n", + "│ inverted_sz_mb │ 1.83944 │\n", + "│ key_table_size_mb │ 0.00932026 │\n", + "│ offset_bits_per_record_avg │ 10.6699 │\n", + "│ offset_vectors_sz_mb │ 0.089057 │\n", + "│ offsets_per_term_avg │ 1.38937 │\n", + "│ records_per_doc_avg │ 202.386 │\n", + "│ sortable_values_size_mb │ 0 │\n", + "│ total_indexing_time │ 72.444 │\n", + "│ total_inverted_index_blocks │ 16207 │\n", + "│ vector_index_sz_mb │ 3.01776 │\n", + "╰─────────────────────────────┴────────────╯\n" ] } ], "source": [ - "!rvl stats -i users --port 6379" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "It's important to note that we have not specified that the ``user``, ``job``, ``credit_score`` and ``age`` in the metadata should be fields within the index, this is because the ``Redis`` VectorStore object automatically generate the index schema from the passed metadata. For more information on the generation of index fields, see the API documentation." 
+ "!rvl stats -i newsgroups --port 6379" ] }, { "cell_type": "markdown", + "id": "c3620501", "metadata": {}, "source": [ "## Query vector store\n", @@ -528,424 +636,481 @@ "\n", "### Query directly\n", "\n", - "#### Similarity search\n", - "\n", "Performing a simple similarity search can be done as follows:" ] }, { "cell_type": "code", - "execution_count": 13, + "execution_count": 15, + "id": "aa0a16fa", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "* Building an exciting new project with LangChain - come check it out! [{'id': 'doc:users:5daf0855-b352-45bd-9d29-e21ff66e38c8'}]\n", - "* LangGraph is the best framework for building stateful, agentic applications! [{'id': 'doc:users:4005bba2-2a08-4160-a16f-5cc3cf9d4aea'}]\n" + "Simple Similarity Search Results:\n", + "Content: From: aa429@freenet.carleton.ca (Terry Ford)\n", + "Subject: A flawed propulsion system: Space Shuttle\n", + "X-Ad...\n", + "Metadata: {'category': 'sci.space'}\n", + "\n", + "Content: From: nsmca@aurora.alaska.edu\n", + "Subject: Space Design Movies?\n", + "Article-I.D.: aurora.1993Apr23.124722.1\n", + "...\n", + "Metadata: {'category': 'sci.space'}\n", + "\n" ] } ], "source": [ - "results = vector_store.similarity_search(\n", - " \"LangChain provides abstractions to make working with LLMs easy\", k=2\n", - ")\n", - "for res in results:\n", - " print(f\"* {res.page_content} [{res.metadata}]\")" + "query = \"Tell me about space exploration\"\n", + "results = vector_store.similarity_search(query, k=2)\n", + "\n", + "print(\"Simple Similarity Search Results:\")\n", + "for doc in results:\n", + " print(f\"Content: {doc.page_content[:100]}...\")\n", + " print(f\"Metadata: {doc.metadata}\")\n", + " print()" ] }, { "cell_type": "markdown", + "id": "3ed9d733", "metadata": {}, "source": [ - "#### Similarity search with score\n", - "\n", - "You can also search with score:" + "If you want to execute a similarity search and receive the corresponding scores you can run:" ] }, { "cell_type": "code", - "execution_count": 14, + "execution_count": 16, + "id": "5efd2eaa", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "* [SIM=0.446900] The weather forecast for tomorrow is cloudy and overcast, with a high of 62 degrees. 
[{'id': 'doc:users:032b489f-d37e-4bf1-85ec-4c2275be48ef'}]\n" + "Similarity Search with Score Results:\n", + "Content: From: aa429@freenet.carleton.ca (Terry Ford)\n", + "Subject: A flawed propulsion system: Space Shuttle\n", + "X-Ad...\n", + "Metadata: {'category': 'sci.space'}\n", + "Score: 0.569670975208\n", + "\n", + "Content: From: nsmca@aurora.alaska.edu\n", + "Subject: Space Design Movies?\n", + "Article-I.D.: aurora.1993Apr23.124722.1\n", + "...\n", + "Metadata: {'category': 'sci.space'}\n", + "Score: 0.590400338173\n", + "\n" ] } ], "source": [ - "results = vector_store.similarity_search_with_score(\"Will it be hot tomorrow?\", k=1)\n", - "for res, score in results:\n", - " print(f\"* [SIM={score:3f}] {res.page_content} [{res.metadata}]\")" + "# Similarity search with score and filter\n", + "scored_results = vector_store.similarity_search_with_score(query, k=2)\n", + "\n", + "print(\"Similarity Search with Score Results:\")\n", + "for doc, score in scored_results:\n", + " print(f\"Content: {doc.page_content[:100]}...\")\n", + " print(f\"Metadata: {doc.metadata}\")\n", + " print(f\"Score: {score}\")\n", + " print()" ] }, { "cell_type": "markdown", + "id": "0c235cdc", "metadata": {}, "source": [ - "#### Other search methods\n", - "\n", - "For a list of all the search functions available to the `Redis` vector store, please refer to the [API reference](https://api.python.langchain.com/en/latest/vectorstores/langchain_community.vectorstores.redis.base.Redis.html)\n", - "\n", - "## Connect to an existing Index\n", - "\n", - "In order to have the same metadata indexed when using the ``Redis`` VectorStore. You will need to have the same ``index_schema`` passed in either as a path to a yaml file or as a dictionary. The following shows how to obtain the schema from an index and connect to an existing index." - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "metadata": {}, - "outputs": [], - "source": [ - "# write the schema to a yaml file\n", - "vector_store.write_schema(\"redis_schema.yaml\")" - ] - }, - { - "cell_type": "code", - "execution_count": 19, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'id': 'doc:users:8484c48a032d4c4cbe3cc2ed6845fabb', 'user': 'john', 'job': 'engineer', 'credit_score': 'high', 'age': '18'}\n" - ] - } - ], - "source": [ - "# now we can connect to our existing index as follows\n", + "### Query by turning into retriever\n", "\n", - "new_rds = Redis.from_existing_index(\n", - " embeddings,\n", - " index_name=\"users\",\n", - " redis_url=\"redis://localhost:6379\",\n", - " schema=\"redis_schema.yaml\",\n", - ")\n", - "results = new_rds.similarity_search(\"foo\", k=3)\n", - "print(results[0].metadata)" + "You can also transform the vector store into a retriever for easier usage in your chains." ] }, { "cell_type": "code", - "execution_count": 20, + "execution_count": 17, + "id": "f3460093", "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "True" + "[Document(metadata={'category': 'sci.space'}, page_content='Subject: Re: Comet in Temporary Orbit Around Jupiter?\\nFrom: Robert Coe \\nDistribution: world\\nOrganization: 1776 Enterprises, Sudbury MA\\nLines: 23\\n\\njgarland@kean.ucs.mun.ca writes:\\n\\n> >> Also, perihelions of Gehrels3 were:\\n> >> \\n> >> April 1973 83 jupiter radii\\n> >> August 1970 ~3 jupiter radii\\n> > \\n> > Where 1 Jupiter radius = 71,000 km = 44,000 mi = 0.0005 AU. 
So the\\n> > 1970 figure seems unlikely to actually be anything but a perijove.\\n> > Is that the case for the 1973 figure as well?\\n> > -- \\n> Sorry, _perijoves_...I\\'m not used to talking this language.\\n\\nHmmmm.... The prefix \"peri-\" is Greek, not Latin, so it\\'s usually used\\nwith the Greek form of the name of the body being orbited. (That\\'s why\\nit\\'s \"perihelion\" rather than \"perisol\", \"perigee\" rather than \"periterr\",\\nand \"pericynthion\" rather than \"perilune\".) So for Jupiter I\\'d expect it\\nto be something like \"perizeon\".) :^)\\n\\n ___ _ - Bob\\n /__) _ / / ) _ _\\n(_/__) (_)_(_) (___(_)_(/_______________________________________ bob@1776.COM\\nRobert K. Coe ** 14 Churchill St, Sudbury, Massachusetts 01776 ** 508-443-3265\\n'),\n", + " Document(metadata={'category': 'sci.space'}, page_content='From: pyron@skndiv.dseg.ti.com (Dillon Pyron)\\nSubject: Re: Why not give $1 billion to first year-long moon residents?\\nLines: 42\\nNntp-Posting-Host: skndiv.dseg.ti.com\\nReply-To: pyron@skndiv.dseg.ti.com\\nOrganization: TI/DSEG VAX Support\\n\\n\\nIn article <1qve4kINNpas@sal-sun121.usc.edu>, schaefer@sal-sun121.usc.edu (Peter Schaefer) writes:\\n>In article <1993Apr19.130503.1@aurora.alaska.edu>, nsmca@aurora.alaska.edu writes:\\n>|> In article <6ZV82B2w165w@theporch.raider.net>, gene@theporch.raider.net (Gene Wright) writes:\\n>|> > With the continuin talk about the \"End of the Space Age\" and complaints \\n>|> > by government over the large cost, why not try something I read about \\n>|> > that might just work.\\n>|> > \\n>|> > Announce that a reward of $1 billion would go to the first corporation \\n>|> > who successfully keeps at least 1 person alive on the moon for a year. \\n>|> > Then you\\'d see some of the inexpensive but not popular technologies begin \\n>|> > to be developed. THere\\'d be a different kind of space race then!\\n>|> > \\n>|> > --\\n>|> > gene@theporch.raider.net (Gene Wright)\\n>|> > theporch.raider.net 615/297-7951 The MacInteresteds of Nashville\\n>|> ====\\n>|> If that were true, I\\'d go for it.. I have a few friends who we could pool our\\n>|> resources and do it.. Maybe make it a prize kind of liek the \"Solar Car Race\"\\n>|> in Australia..\\n>|> Anybody game for a contest!\\n>|> \\n>|> ==\\n>|> Michael Adams, nsmca@acad3.alaska.edu -- I\\'m not high, just jacked\\n>\\n>\\n>Oh gee, a billion dollars! That\\'d be just about enough to cover the cost of the\\n>feasability study! Happy, Happy, JOY! JOY!\\n>\\n\\nFeasability study?? What a wimp!! While you are studying, others would be\\ndoing. Too damn many engineers doing way too little engineering.\\n\\n\"He who sits on his arse sits on his fortune\" - Sir Richard Francis Burton\\n--\\nDillon Pyron | The opinions expressed are those of the\\nTI/DSEG Lewisville VAX Support | sender unless otherwise stated.\\n(214)462-3556 (when I\\'m here) |\\n(214)492-4656 (when I\\'m home) |Texans: Vote NO on Robin Hood. 
We need\\npyron@skndiv.dseg.ti.com |solutions, not gestures.\\nPADI DM-54909 |\\n\\n')]" ] }, - "execution_count": 20, + "execution_count": 17, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "# see the schemas are the same\n", - "new_rds.schema == vector_store.schema" + "retriever = vector_store.as_retriever(search_type=\"similarity\", search_kwargs={\"k\": 2})\n", + "retriever.invoke(\"What planet in the solar system has the largest number of moons?\")" ] }, { "cell_type": "markdown", + "id": "901c75dc", "metadata": {}, "source": [ - "## Custom metadata indexing\n", - "\n", - "In some cases, you may want to control what fields the metadata maps to. For example, you may want the ``credit_score`` field to be a categorical field instead of a text field (which is the default behavior for all string fields). In this case, you can use the ``index_schema`` parameter in each of the initialization methods above to specify the schema for the index. Custom index schema can either be passed as a dictionary or as a path to a YAML file.\n", - "\n", - "All arguments in the schema have defaults besides the name, so you can specify only the fields you want to change. All the names correspond to the snake/lowercase versions of the arguments you would use on the command line with ``redis-cli`` or in ``redis-py``. For more on the arguments for each field, see the [documentation](https://redis.io/docs/interact/search-and-query/basic-constructs/field-and-type-options/)\n", - "\n", - "The below example shows how to specify the schema for the ``credit_score`` field as a Tag (categorical) field instead of a text field. \n", - "\n", - "```yaml\n", - "# index_schema.yml\n", - "tag:\n", - " - name: credit_score\n", - "text:\n", - " - name: user\n", - " - name: job\n", - "numeric:\n", - " - name: age\n", - "```\n", - "\n", - "In Python, this would look like:\n", - "\n", - "```python\n", + "## Usage for retrieval-augmented generation\n", "\n", - "index_schema = {\n", - " \"tag\": [{\"name\": \"credit_score\"}],\n", - " \"text\": [{\"name\": \"user\"}, {\"name\": \"job\"}],\n", - " \"numeric\": [{\"name\": \"age\"}],\n", - "}\n", + "For guides on how to use this vector store for retrieval-augmented generation (RAG), see the following sections:\n", "\n", - "```\n", + "- [Tutorials: working with external knowledge](https://python.langchain.com/v0.2/docs/tutorials/#working-with-external-knowledge)\n", + "- [How-to: Question and answer with RAG](https://python.langchain.com/v0.2/docs/how_to/#qa-with-rag)\n", + "- [Retrieval conceptual docs](https://python.langchain.com/v0.2/docs/concepts/#retrieval)" + ] + }, + { + "cell_type": "markdown", + "id": "069f1b5f", + "metadata": {}, + "source": [ + "## Redis-specific functionality\n", "\n", - "Notice that only the ``name`` field needs to be specified. All other fields have defaults." 
+ "Redis offers some unique features for vector search:" + ] + }, + { + "cell_type": "markdown", + "id": "8a627d3a-af78-46e2-b314-007e641b4d1d", + "metadata": {}, + "source": [ + "### Similarity search with metadata filtering\n", + "We can filter our search results based on metadata:" ] }, { "cell_type": "code", - "execution_count": 21, + "execution_count": 18, + "id": "23d6e6fe-8aee-4cee-bf05-59cf3fba36ae", "metadata": {}, "outputs": [ { - "name": "stderr", + "name": "stdout", "output_type": "stream", "text": [ - "`index_schema` does not match generated metadata schema.\n", - "If you meant to manually override the schema, please ignore this message.\n", - "index_schema: {'tag': [{'name': 'credit_score'}], 'text': [{'name': 'user'}, {'name': 'job'}], 'numeric': [{'name': 'age'}]}\n", - "generated_schema: {'text': [{'name': 'user'}, {'name': 'job'}, {'name': 'credit_score'}], 'numeric': [{'name': 'age'}], 'tag': []}\n", + "Filtered Similarity Search Results:\n", + "Content: From: aa429@freenet.carleton.ca (Terry Ford)\n", + "Subject: A flawed propulsion system: Space Shuttle\n", + "X-Ad...\n", + "Metadata: {'category': 'sci.space'}\n", + "\n", + "Content: From: nsmca@aurora.alaska.edu\n", + "Subject: Space Design Movies?\n", + "Article-I.D.: aurora.1993Apr23.124722.1\n", + "...\n", + "Metadata: {'category': 'sci.space'}\n", "\n" ] } ], "source": [ - "# create a new index with the new schema defined above\n", - "index_schema = {\n", - " \"tag\": [{\"name\": \"credit_score\"}],\n", - " \"text\": [{\"name\": \"user\"}, {\"name\": \"job\"}],\n", - " \"numeric\": [{\"name\": \"age\"}],\n", - "}\n", - "texts = [] # list of texts\n", - "metadata = {} # dictionary of metadata\n", - "\n", - "rds, keys = Redis.from_texts_return_keys(\n", - " texts,\n", - " embeddings,\n", - " metadatas=metadata,\n", - " redis_url=\"redis://localhost:6379\",\n", - " index_name=\"users_modified\",\n", - " index_schema=index_schema, # pass in the new index schema\n", - ")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The above warning is meant to notify users when they are overriding the default behavior. Ignore it if you are intentionally overriding the behavior." + "from redisvl.query.filter import Tag\n", + "\n", + "query = \"Tell me about space exploration\"\n", + "\n", + "# Create a RedisVL filter expression\n", + "filter_condition = Tag(\"category\") == \"sci.space\"\n", + "\n", + "filtered_results = vector_store.similarity_search(query, k=2, filter=filter_condition)\n", + "\n", + "print(\"Filtered Similarity Search Results:\")\n", + "for doc in filtered_results:\n", + " print(f\"Content: {doc.page_content[:100]}...\")\n", + " print(f\"Metadata: {doc.metadata}\")\n", + " print()" ] }, { "cell_type": "markdown", + "id": "f35b9ebf-6419-4402-a066-c1d5287ed38e", "metadata": {}, "source": [ - "## Hybrid filtering\n", - "\n", - "With the Redis Filter Expression language built into LangChain, you can create arbitrarily long chains of hybrid filters\n", - "that can be used to filter your search results. The expression language is derived from the [RedisVL Expression Syntax](https://redisvl.com)\n", - "and is designed to be easy to use and understand.\n", - "\n", - "The following are the available filter types:\n", - "- ``RedisText``: Filter by full-text search against metadata fields. 
Supports exact, fuzzy, and wildcard matching.\n", - "- ``RedisNum``: Filter by numeric range against metadata fields.\n", - "- ``RedisTag``: Filter by the exact match against string-based categorical metadata fields. Multiple tags can be specified like \"tag1,tag2,tag3\".\n", - "\n", - "The following are examples of utilizing these filters.\n", - "\n", - "```python\n", - "\n", - "from langchain_community.vectorstores.redis import RedisText, RedisNum, RedisTag\n", - "\n", - "# exact matching\n", - "has_high_credit = RedisTag(\"credit_score\") == \"high\"\n", - "does_not_have_high_credit = RedisTag(\"credit_score\") != \"low\"\n", - "\n", - "# fuzzy matching\n", - "job_starts_with_eng = RedisText(\"job\") % \"eng*\"\n", - "job_is_engineer = RedisText(\"job\") == \"engineer\"\n", - "job_is_not_engineer = RedisText(\"job\") != \"engineer\"\n", - "\n", - "# numeric filtering\n", - "age_is_18 = RedisNum(\"age\") == 18\n", - "age_is_not_18 = RedisNum(\"age\") != 18\n", - "age_is_greater_than_18 = RedisNum(\"age\") > 18\n", - "age_is_less_than_18 = RedisNum(\"age\") < 18\n", - "age_is_greater_than_or_equal_to_18 = RedisNum(\"age\") >= 18\n", - "age_is_less_than_or_equal_to_18 = RedisNum(\"age\") <= 18\n", - "\n", - "```\n", - "\n", - "The ``RedisFilter`` class can be used to simplify the import of these filters as follows\n", - "\n", - "```python\n", - "\n", - "from langchain_community.vectorstores.redis import RedisFilter\n", - "\n", - "# same examples as above\n", - "has_high_credit = RedisFilter.tag(\"credit_score\") == \"high\"\n", - "does_not_have_high_credit = RedisFilter.num(\"age\") > 8\n", - "job_starts_with_eng = RedisFilter.text(\"job\") % \"eng*\"\n", - "```\n", - "\n", - "The following are examples of using a hybrid filter for search" + "### Maximum marginal relevance search\n", + "Maximum marginal relevance search helps in getting diverse results:" ] }, { "cell_type": "code", - "execution_count": 22, + "execution_count": 19, + "id": "5be2afeb-d0a3-4075-bd3c-4cbe409dfb3a", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "Job: engineer\n", - "Engineers in the dataset: 2\n" + "Maximum Marginal Relevance Search Results:\n", + "Content: From: aa429@freenet.carleton.ca (Terry Ford)\n", + "Subject: A flawed propulsion system: Space Shuttle\n", + "X-Ad...\n", + "Metadata: {'category': 'sci.space'}\n", + "\n", + "Content: From: moroney@world.std.com (Michael Moroney)\n", + "Subject: Re: Vulcan? 
(No, not the guy with the ears!)\n", + "...\n", + "Metadata: {'category': 'sci.space'}\n", + "\n" ] } ], "source": [ - "from langchain_community.vectorstores.redis import RedisText\n", + "# Maximum marginal relevance search with filter\n", + "mmr_results = vector_store.max_marginal_relevance_search(\n", + " query, k=2, fetch_k=10, filter=filter_condition\n", + ")\n", + "\n", + "print(\"Maximum Marginal Relevance Search Results:\")\n", + "for doc in mmr_results:\n", + " print(f\"Content: {doc.page_content[:100]}...\")\n", + " print(f\"Metadata: {doc.metadata}\")\n", + " print()" + ] + }, + { + "cell_type": "markdown", + "id": "09c3343c-6af4-4151-ba0a-50800fc34855", + "metadata": {}, + "source": [ + "## Chain usage\n", + "The code below shows how to use the vector store as a retriever in a simple RAG chain:\n", "\n", - "is_engineer = RedisText(\"job\") == \"engineer\"\n", - "results = rds.similarity_search(\"foo\", k=3, filter=is_engineer)\n", + "```{=mdx}\n", + "import ChatModelTabs from \"@theme/ChatModelTabs\";\n", "\n", - "print(\"Job:\", results[0].metadata[\"job\"])\n", - "print(\"Engineers in the dataset:\", len(results))" + "\n", + "```" ] }, { "cell_type": "code", - "execution_count": 23, + "execution_count": 20, + "id": "9f6658f8-45b7-4004-a0b3-893bd23bff41", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "Job: doctor\n", - "Job: doctor\n", - "Jobs in dataset that start with 'doc': 2\n" + "OpenAI API key not found in environment variables.\n" + ] + }, + { + "name": "stdin", + "output_type": "stream", + "text": [ + "Please enter your OpenAI API key: ········\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "OpenAI API key has been set for this session.\n" ] } ], "source": [ - "# fuzzy match\n", - "starts_with_doc = RedisText(\"job\") % \"doc*\"\n", - "results = rds.similarity_search(\"foo\", k=3, filter=starts_with_doc)\n", + "# | output: false\n", + "# | echo: false\n", + "from getpass import getpass\n", + "\n", + "from langchain_openai import ChatOpenAI\n", + "\n", + "# Check if OPENAI_API_KEY is already set in the environment\n", + "openai_api_key = os.getenv(\"OPENAI_API_KEY\")\n", + "\n", + "if not openai_api_key:\n", + " print(\"OpenAI API key not found in environment variables.\")\n", + " openai_api_key = getpass(\"Please enter your OpenAI API key: \")\n", + "\n", + " # Set the API key for the current session\n", + " os.environ[\"OPENAI_API_KEY\"] = openai_api_key\n", + " print(\"OpenAI API key has been set for this session.\")\n", + "else:\n", + " print(\"OpenAI API key found in environment variables.\")\n", "\n", - "for result in results:\n", - " print(\"Job:\", result.metadata[\"job\"])\n", - "print(\"Jobs in dataset that start with 'doc':\", len(results))" + "llm = ChatOpenAI(model=\"gpt-4o-mini\")" ] }, { "cell_type": "code", - "execution_count": 24, + "execution_count": 21, + "id": "d0ac614c-3f80-4839-8451-d3322a870809", "metadata": {}, "outputs": [ { - "name": "stdout", - "output_type": "stream", - "text": [ - "User: derrick is 45\n", - "User: nancy is 94\n", - "User: joe is 35\n" - ] + "data": { + "text/plain": [ + "'The Space Shuttle program was a NASA initiative that enabled reusable spacecraft to transport astronauts and cargo to and from low Earth orbit. It conducted a variety of missions, including satellite deployment, scientific research, and assembly of the International Space Station, and typically carried a crew of five astronauts. 
Although it achieved many successes, the program faced criticism for its safety concerns and the complexity of its propulsion system.'" + ] + }, + "execution_count": 21, + "metadata": {}, + "output_type": "execute_result" } ], "source": [ - "from langchain_community.vectorstores.redis import RedisNum\n", + "from langchain_core.output_parsers import StrOutputParser\n", + "from langchain_core.prompts import ChatPromptTemplate\n", + "from langchain_core.runnables import RunnablePassthrough\n", + "\n", + "# Prompt\n", + "prompt = ChatPromptTemplate.from_messages(\n", + " [\n", + " (\n", + " \"human\",\n", + " \"\"\"You are an assistant for question-answering tasks. Use the following pieces of retrieved context to answer the question. If you don't know the answer, just say that you don't know. Use three sentences maximum and keep the answer concise.\n", + "Question: {question} \n", + "Context: {context} \n", + "Answer:\"\"\",\n", + " ),\n", + " ]\n", + ")\n", + "\n", + "\n", + "def format_docs(docs):\n", + " return \"\\n\\n\".join(doc.page_content for doc in docs)\n", + "\n", + "\n", + "rag_chain = (\n", + " {\"context\": retriever | format_docs, \"question\": RunnablePassthrough()}\n", + " | prompt\n", + " | llm\n", + " | StrOutputParser()\n", + ")\n", "\n", - "is_over_18 = RedisNum(\"age\") > 18\n", - "is_under_99 = RedisNum(\"age\") < 99\n", - "age_range = is_over_18 & is_under_99\n", - "results = rds.similarity_search(\"foo\", filter=age_range)\n", + "rag_chain.invoke(\"Describe the Space Shuttle program?\")" + ] + }, + { + "cell_type": "markdown", + "id": "8ad3e6e4-36ef-494a-be50-4bf8e374b077", + "metadata": {}, + "source": [ + "## Connect to an existing Index\n", "\n", - "for result in results:\n", - " print(\"User:\", result.metadata[\"user\"], \"is\", result.metadata[\"age\"])" + "In order to have the same metadata indexed when using the ``Redis`` VectorStore. You will need to have the same ``index_schema`` passed in either as a path to a yaml file or as a dictionary. The following shows how to obtain the schema from an index and connect to an existing index." + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "id": "6a0e7a49-8271-44b2-abb2-0ef499546b28", + "metadata": {}, + "outputs": [], + "source": [ + "# write the schema to a yaml file\n", + "vector_store.index.schema.to_yaml(\"redis_schema.yaml\")" ] }, { "cell_type": "code", - "execution_count": 25, + "execution_count": 33, + "id": "e3588805-b3d9-4af8-8786-b57fc640ebb0", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "User: derrick is 45\n", - "User: nancy is 94\n", - "User: joe is 35\n" + "18:19:58 redisvl.index.index INFO Index already exists, not overwriting.\n", + "page_content='From: aa429@freenet.carleton.ca (Terry Ford)\n", + "Subject: A flawed propulsion system: Space Shuttle\n", + "X-Added: Forwarded by Space Digest\n", + "Organization: [via International Space University]\n", + "Original-Sender: isu@VACATION.VENARI.CS.CMU.EDU\n", + "Distribution: sci\n", + "Lines: 13\n", + "\n", + "\n", + "\n", + "For an essay, I am writing about the space shuttle and a need for a better\n", + "propulsion system. Through research, I have found that it is rather clumsy \n", + "(i.e. all the checks/tests before launch), the safety hazards (\"sitting\n", + "on a hydrogen bomb\"), etc.. 
If you have any beefs about the current\n", + "space shuttle program Re: propulsion, please send me your ideas.\n", + "\n", + "Thanks a lot.\n", + "\n", + "--\n", + "Terry Ford [aa429@freenet.carleton.ca]\n", + "Nepean, Ontario, Canada.\n", + "' metadata={'category': 'sci.space'}\n" ] } ], "source": [ - "# make sure to use parenthesis around FilterExpressions\n", - "# if initializing them while constructing them\n", - "age_range = (RedisNum(\"age\") > 18) & (RedisNum(\"age\") < 99)\n", - "results = rds.similarity_search(\"foo\", filter=age_range)\n", - "\n", - "for result in results:\n", - " print(\"User:\", result.metadata[\"user\"], \"is\", result.metadata[\"age\"])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Query by turning into retriever\n", + "# now we can connect to our existing index as follows\n", "\n", - "You can also transform the vector store into a retriever for easier usage in your chains. Here we go over different options for using the vector store as a retriever.\n", + "new_rdvs = RedisVectorStore(\n", + " embeddings,\n", + " redis_url=REDIS_URL,\n", + " schema_path=\"redis_schema.yaml\",\n", + ")\n", "\n", - "There are three different search methods we can use to do retrieval. By default, it will use semantic similarity. To see all the options, please refer to the [API reference](https://api.python.langchain.com/en/latest/vectorstores/langchain_community.vectorstores.redis.base.Redis.html#langchain_community.vectorstores.redis.base.Redis.as_retriever)" + "results = new_rdvs.similarity_search(\"Space Shuttle Propulsion System\", k=3)\n", + "print(results[0])" ] }, { "cell_type": "code", - "execution_count": 16, + "execution_count": 35, + "id": "4d7ff456-de2a-4c58-9a3f-a9a3cfdca492", "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "[Document(metadata={'id': 'doc:users:b9204897-190b-4dd9-af2b-081ed4e9cbb0'}, page_content='Robbers broke into the city bank and stole $1 million in cash.')]" + "True" ] }, - "execution_count": 16, + "execution_count": 35, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "retriever = vector_store.as_retriever(\n", - " search_type=\"similarity_score_threshold\",\n", - " search_kwargs={\"k\": 1, \"score_threshold\": 0.2},\n", - ")\n", - "retriever.invoke(\"Stealing from the bank is a crime\")" + "# compare the two schemas to verify they are the same\n", + "new_rdvs.index.schema == vector_store.index.schema" ] }, { "cell_type": "markdown", + "id": "044a2a8c-cb25-453b-a439-38fcb06081ab", "metadata": {}, "source": [ - "## Usage for retrieval-augmented generation\n", - "\n", - "For guides on how to use this vector store for retrieval-augmented generation (RAG), see the following sections:\n", - "\n", - "- [Tutorials: working with external knowledge](https://python.langchain.com/v0.2/docs/tutorials/#working-with-external-knowledge)\n", - "- [How-to: Question and answer with RAG](https://python.langchain.com/v0.2/docs/how_to/#qa-with-rag)\n", - "- [Retrieval conceptual docs](https://python.langchain.com/v0.2/docs/concepts/#retrieval)" + "## Cleanup vector store" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "id": "bb24ab8b-1040-489d-bef6-9137dd2215f3", + "metadata": {}, + "outputs": [], + "source": [ + "# Clear vector store\n", + "vector_store.index.delete(drop=True)" ] }, { "cell_type": "markdown", + "id": "8a27244f", "metadata": {}, "source": [ "## API reference\n", "\n", - "For detailed documentation of all `Redis` vector store features and configurations head to the API 
reference: https://api.python.langchain.com/en/latest/vectorstores/langchain_community.vectorstores.redis.base.Redis.html" + "For detailed documentation of all RedisVectorStore features and configurations head to the API reference: https://api.python.langchain.com/en/latest/vectorstores/langchain_redis.vectorstores.RedisVectorStore.html" ] } ], @@ -969,5 +1134,5 @@ } }, "nbformat": 4, - "nbformat_minor": 4 + "nbformat_minor": 5 } From e499caa9cd3f44d9ff843159eb18402bcd7b7c34 Mon Sep 17 00:00:00 2001 From: mschoenb97IL Date: Thu, 22 Aug 2024 13:10:51 -0400 Subject: [PATCH 63/80] community: Give more context on DeepInfra 500 errors (#25671) Description: DeepInfra 500 errors have useful information in the text field that isn't being exposed to the user. I updated the error message to fix this. As an example, this code ``` from langchain_community.chat_models import ChatDeepInfra from langchain_core.messages import HumanMessage model = "meta-llama/Meta-Llama-3-70B-Instruct" deepinfra_api_token = "..." model = ChatDeepInfra(model=model, deepinfra_api_token=deepinfra_api_token) messages = [HumanMessage("All work and no play makes Jack a dull boy\n" * 9000)] response = model.invoke(messages) ``` Currently gives this error: ``` langchain_community.chat_models.deepinfra.ChatDeepInfraException: DeepInfra Server: Error 500 ``` This change would give the following error: ``` langchain_community.chat_models.deepinfra.ChatDeepInfraException: DeepInfra Server error status 500: {"error":{"message":"Requested input length 99009 exceeds maximum input length 8192"}} ``` --- libs/community/langchain_community/chat_models/deepinfra.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/libs/community/langchain_community/chat_models/deepinfra.py b/libs/community/langchain_community/chat_models/deepinfra.py index 41da4adc9d83d..ee670248a6716 100644 --- a/libs/community/langchain_community/chat_models/deepinfra.py +++ b/libs/community/langchain_community/chat_models/deepinfra.py @@ -449,7 +449,9 @@ def _llm_type(self) -> str: def _handle_status(self, code: int, text: Any) -> None: if code >= 500: - raise ChatDeepInfraException(f"DeepInfra Server: Error {code}") + raise ChatDeepInfraException( + f"DeepInfra Server error status {code}: {text}" + ) elif code >= 400: raise ValueError(f"DeepInfra received an invalid payload: {text}") elif code != 200: From 16fc0a866e32a730c0513ee42cdcaf4b4523edf4 Mon Sep 17 00:00:00 2001 From: Dylan Date: Fri, 23 Aug 2024 03:15:45 +1000 Subject: [PATCH 64/80] docs: Change Pull Request to Merge Request in GitLab notebook (#25649) - **Description:** In GitLab we call these "merge requests" rather than "pull requests" so I thought I'd go ahead and update the notebook. - **Issue:** N/A - **Dependencies:** none - **Twitter handle:** N/A Thanks for creating the tools and notebook to help people work with GitLab. I thought I'd contribute some minor docs updates here. --- docs/docs/integrations/tools/gitlab.ipynb | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/docs/docs/integrations/tools/gitlab.ipynb b/docs/docs/integrations/tools/gitlab.ipynb index 3eccad6b76835..b22faeeb88e21 100644 --- a/docs/docs/integrations/tools/gitlab.ipynb +++ b/docs/docs/integrations/tools/gitlab.ipynb @@ -23,7 +23,7 @@ "\n", "3. **Comment on Issue**- posts a comment on a specific issue.\n", "\n", - "4. **Create Pull Request**- creates a pull request from the bot's working branch to the base branch.\n", + "4. 
**Create Merge Request**- creates a merge request from the bot's working branch to the base branch.\n", "\n", "5. **Create File**- creates a new file in the repository.\n", "\n", @@ -82,7 +82,7 @@ "* **GITLAB_PERSONAL_ACCESS_TOKEN**- The personal access token you created in the last step\n", "* **GITLAB_REPOSITORY**- The name of the Gitlab repository you want your bot to act upon. Must follow the format {username}/{repo-name}.\n", "* **GITLAB_BRANCH**- The branch where the bot will make its commits. Defaults to 'main.'\n", - "* **GITLAB_BASE_BRANCH**- The base branch of your repo, usually either 'main' or 'master.' This is where pull requests will base from. Defaults to 'main.'\n" + "* **GITLAB_BASE_BRANCH**- The base branch of your repo, usually either 'main' or 'master.' This is where merge requests will base from. Defaults to 'main.'\n" ] }, { @@ -185,14 +185,14 @@ "\n", ">>>> NEW\u001b[0m\n", "Observation: \u001b[36;1m\u001b[1;3mUpdated file game.html\u001b[0m\n", - "Thought:\u001b[32;1m\u001b[1;3m I need to create a pull request to submit my changes.\n", - "Action: Create Pull Request\n", + "Thought:\u001b[32;1m\u001b[1;3m I need to create a merge request to submit my changes.\n", + "Action: Create Merge Request\n", "Action Input: Add tic-tac-toe game\n", "\n", "added tic-tac-toe game, closes issue #15\u001b[0m\n", - "Observation: \u001b[36;1m\u001b[1;3mSuccessfully created PR number 12\u001b[0m\n", + "Observation: \u001b[36;1m\u001b[1;3mSuccessfully created MR number 12\u001b[0m\n", "Thought:\u001b[32;1m\u001b[1;3m I now know the final answer.\n", - "Final Answer: I have created a pull request with number 12 that solves issue 15.\u001b[0m\n", + "Final Answer: I have created a merge request with number 12 that solves issue 15.\u001b[0m\n", "\n", "\u001b[1m> Finished chain.\u001b[0m\n" ] @@ -200,7 +200,7 @@ { "data": { "text/plain": [ - "'I have created a pull request with number 12 that solves issue 15.'" + "'I have created a merge request with number 12 that solves issue 15.'" ] }, "execution_count": 8, @@ -210,7 +210,7 @@ ], "source": [ "agent.run(\n", - " \"You have the software engineering capabilities of a Google Principle engineer. You are tasked with completing issues on a gitlab repository. Please look at the open issues and complete them by creating pull requests that solve the issues.\"\n", + " \"You have the software engineering capabilities of a Google Principle engineer. You are tasked with completing issues on a gitlab repository. Please look at the open issues and complete them by creating merge requests that solve the issues.\"\n", ")" ] }, From 47adc7f32bdac2b250012036e42d296de4eaeb45 Mon Sep 17 00:00:00 2001 From: Leonid Ganeline Date: Thu, 22 Aug 2024 10:19:17 -0700 Subject: [PATCH 65/80] docs: `integrations` reference updates 11 (#25598) Added missed provider pages and links. 
--- docs/docs/integrations/providers/dria.mdx | 25 +++++++++++ .../providers/duckduckgo_search.mdx | 25 +++++++++++ docs/docs/integrations/providers/e2b.mdx | 20 +++++++++ .../integrations/providers/elasticsearch.mdx | 45 +++++++++++++++++-- 4 files changed, 112 insertions(+), 3 deletions(-) create mode 100644 docs/docs/integrations/providers/dria.mdx create mode 100644 docs/docs/integrations/providers/duckduckgo_search.mdx create mode 100644 docs/docs/integrations/providers/e2b.mdx diff --git a/docs/docs/integrations/providers/dria.mdx b/docs/docs/integrations/providers/dria.mdx new file mode 100644 index 0000000000000..7e3c5cdbace43 --- /dev/null +++ b/docs/docs/integrations/providers/dria.mdx @@ -0,0 +1,25 @@ +# Dria + +>[Dria](https://dria.co/) is a hub of public RAG models for developers to +> both contribute and utilize a shared embedding lake. + +See more details about the LangChain integration with Dria +at [this page](https://dria.co/docs/integrations/langchain). + +## Installation and Setup + +You have to install a python package: + +```bash +pip install dria +``` + +You have to get an API key from Dria. You can get it by signing up at [Dria](https://dria.co/). + +## Retrievers + +See a [usage example](/docs/integrations/retrievers/dria_index). + +```python +from langchain_community.retrievers import DriaRetriever +``` diff --git a/docs/docs/integrations/providers/duckduckgo_search.mdx b/docs/docs/integrations/providers/duckduckgo_search.mdx new file mode 100644 index 0000000000000..29ab01981f45f --- /dev/null +++ b/docs/docs/integrations/providers/duckduckgo_search.mdx @@ -0,0 +1,25 @@ +# DuckDuckGo Search + +>[DuckDuckGo Search](https://github.com/deedy5/duckduckgo_search) is a package that +> searches for words, documents, images, videos, news, maps and text +> translation using the `DuckDuckGo.com` search engine. It is downloading files +> and images to a local hard drive. + +## Installation and Setup + +You have to install a python package: + +```bash +pip install duckduckgo-search +``` + +## Tools + +See a [usage example](/docs/integrations/tools/ddg). + +There are two tools available: + +```python +from langchain_community.tools import DuckDuckGoSearchRun +from langchain_community.tools import DuckDuckGoSearchResults +``` diff --git a/docs/docs/integrations/providers/e2b.mdx b/docs/docs/integrations/providers/e2b.mdx new file mode 100644 index 0000000000000..ee0ca085aa440 --- /dev/null +++ b/docs/docs/integrations/providers/e2b.mdx @@ -0,0 +1,20 @@ +# E2B + +>[E2B](https://e2b.dev/) provides open-source secure sandboxes +> for AI-generated code execution. See more [here](https://github.com/e2b-dev). + +## Installation and Setup + +You have to install a python package: + +```bash +pip install e2b_code_interpreter +``` + +## Tool + +See a [usage example](/docs/integrations/tools/e2b_data_analysis). + +```python +from langchain_community.tools import E2BDataAnalysisTool +``` diff --git a/docs/docs/integrations/providers/elasticsearch.mdx b/docs/docs/integrations/providers/elasticsearch.mdx index 6311fe794df22..c3b123d47b80f 100644 --- a/docs/docs/integrations/providers/elasticsearch.mdx +++ b/docs/docs/integrations/providers/elasticsearch.mdx @@ -6,11 +6,14 @@ ## Installation and Setup +### Setup Elasticsearch + There are two ways to get started with Elasticsearch: -#### Install Elasticsearch on your local machine via docker +#### Install Elasticsearch on your local machine via Docker -Example: Run a single-node Elasticsearch instance with security disabled. 
This is not recommended for production use. +Example: Run a single-node Elasticsearch instance with security disabled. +This is not recommended for production use. ```bash docker run -p 9200:9200 -e "discovery.type=single-node" -e "xpack.security.enabled=false" -e "xpack.security.http.ssl.enabled=false" docker.elastic.co/elasticsearch/elasticsearch:8.9.0 @@ -18,7 +21,7 @@ Example: Run a single-node Elasticsearch instance with security disabled. This i #### Deploy Elasticsearch on Elastic Cloud -Elastic Cloud is a managed Elasticsearch service. Signup for a [free trial](https://cloud.elastic.co/registration?utm_source=langchain&utm_content=documentation). +`Elastic Cloud` is a managed Elasticsearch service. Signup for a [free trial](https://cloud.elastic.co/registration?utm_source=langchain&utm_content=documentation). ### Install Client @@ -43,7 +46,34 @@ See a [usage example](/docs/integrations/vectorstores/elasticsearch). from langchain_elasticsearch import ElasticsearchStore ``` +### Third-party integrations + +#### EcloudESVectorStore + +```python +from langchain_community.vectorstores.ecloud_vector_search import EcloudESVectorStore +``` + +## Retrievers + +### ElasticsearchRetriever + +The `ElasticsearchRetriever` enables flexible access to all Elasticsearch features +through the Query DSL. + +See a [usage example](/docs/integrations/retrievers/elasticsearch_retriever). + +```python +from langchain_elasticsearch import ElasticsearchRetriever +``` + +### BM25 + +See a [usage example](/docs/integrations/retrievers/elastic_search_bm25). +```python +from langchain_community.retrievers import ElasticSearchBM25Retriever +``` ## Memory See a [usage example](/docs/integrations/memory/elasticsearch_chat_message_history). @@ -67,3 +97,12 @@ See a [usage example](/docs/integrations/stores/elasticsearch). ```python from langchain_elasticsearch import ElasticsearchEmbeddingsCache ``` + +## Chain + +It is a chain for interacting with Elasticsearch Database. 
+ +```python +from langchain.chains.elasticsearch_database import ElasticsearchDatabaseChain +``` + From 9447925d943bd2081bae8cccc0e1907ebb8d8d1c Mon Sep 17 00:00:00 2001 From: Erick Friis Date: Thu, 22 Aug 2024 10:21:19 -0700 Subject: [PATCH 66/80] cli: release 0.0.30 (#25672) --- libs/cli/poetry.lock | 238 +++++++++++++++++++++------------------- libs/cli/pyproject.toml | 2 +- 2 files changed, 127 insertions(+), 113 deletions(-) diff --git a/libs/cli/poetry.lock b/libs/cli/poetry.lock index 584c0a38557ea..b9eaa1b68a9a1 100644 --- a/libs/cli/poetry.lock +++ b/libs/cli/poetry.lock @@ -216,18 +216,18 @@ test = ["pytest (>=6)"] [[package]] name = "fastapi" -version = "0.112.0" +version = "0.112.1" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" optional = false python-versions = ">=3.8" files = [ - {file = "fastapi-0.112.0-py3-none-any.whl", hash = "sha256:3487ded9778006a45834b8c816ec4a48d522e2631ca9e75ec5a774f1b052f821"}, - {file = "fastapi-0.112.0.tar.gz", hash = "sha256:d262bc56b7d101d1f4e8fc0ad2ac75bb9935fec504d2b7117686cec50710cf05"}, + {file = "fastapi-0.112.1-py3-none-any.whl", hash = "sha256:bcbd45817fc2a1cd5da09af66815b84ec0d3d634eb173d1ab468ae3103e183e4"}, + {file = "fastapi-0.112.1.tar.gz", hash = "sha256:b2537146f8c23389a7faa8b03d0bd38d4986e6983874557d95eed2acc46448ef"}, ] [package.dependencies] pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" -starlette = ">=0.37.2,<0.38.0" +starlette = ">=0.37.2,<0.39.0" typing-extensions = ">=4.8.0" [package.extras] @@ -335,21 +335,25 @@ files = [ [[package]] name = "importlib-resources" -version = "6.4.0" +version = "6.4.4" description = "Read resources from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_resources-6.4.0-py3-none-any.whl", hash = "sha256:50d10f043df931902d4194ea07ec57960f66a80449ff867bfe782b4c486ba78c"}, - {file = "importlib_resources-6.4.0.tar.gz", hash = "sha256:cdb2b453b8046ca4e3798eb1d84f3cce1446a0e8e7b5ef4efb600f19fc398145"}, + {file = "importlib_resources-6.4.4-py3-none-any.whl", hash = "sha256:dda242603d1c9cd836c3368b1174ed74cb4049ecd209e7a1a0104620c18c5c11"}, + {file = "importlib_resources-6.4.4.tar.gz", hash = "sha256:20600c8b7361938dc0bb2d5ec0297802e575df486f5a544fa414da65e13721f7"}, ] [package.dependencies] zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["jaraco.test (>=5.4)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["jaraco.test (>=5.4)", "pytest (>=6,!=8.1.*)", "zipp (>=3.17)"] +type = ["pytest-mypy"] [[package]] name = "iniconfig" @@ -427,13 +431,13 @@ referencing = ">=0.31.0" [[package]] name = "langchain-core" -version = "0.2.29" +version = "0.2.34" description = "Building applications with LLMs through composability" optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langchain_core-0.2.29-py3-none-any.whl", hash = "sha256:846c04a3bb72e409a9b928e0eb3ea1762e1473f2c4fb6df2596fbd7b3ab75973"}, - 
{file = "langchain_core-0.2.29.tar.gz", hash = "sha256:491324745a7afee5a7b285c3904edd9dd0c6efa7daf26b92fec6e84a2d2f5d10"}, + {file = "langchain_core-0.2.34-py3-none-any.whl", hash = "sha256:c4fd158273e28cef758b4eccc956b424b76d4bb9117ce6014ae6eb2fb985801d"}, + {file = "langchain_core-0.2.34.tar.gz", hash = "sha256:50048d90b175c0d5a7e28164628b3c7f8c82b0dc2cd766a663d346a18d5c9eb2"}, ] [package.dependencies] @@ -475,16 +479,17 @@ server = ["fastapi (>=0.90.1,<1)", "sse-starlette (>=1.3.0,<2.0.0)"] [[package]] name = "langsmith" -version = "0.1.98" +version = "0.1.101" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langsmith-0.1.98-py3-none-any.whl", hash = "sha256:f79e8a128652bbcee4606d10acb6236973b5cd7dde76e3741186d3b97b5698e9"}, - {file = "langsmith-0.1.98.tar.gz", hash = "sha256:e07678219a0502e8f26d35294e72127a39d25e32fafd091af5a7bb661e9a6bd1"}, + {file = "langsmith-0.1.101-py3-none-any.whl", hash = "sha256:572e2c90709cda1ad837ac86cedda7295f69933f2124c658a92a35fb890477cc"}, + {file = "langsmith-0.1.101.tar.gz", hash = "sha256:caf4d95f314bb6cd3c4e0632eed821fd5cd5d0f18cb824772fce6d7a9113895b"}, ] [package.dependencies] +httpx = ">=0.23.0,<1" orjson = ">=3.9.14,<4.0.0" pydantic = [ {version = ">=1,<3", markers = "python_full_version < \"3.12.4\""}, @@ -569,62 +574,68 @@ files = [ [[package]] name = "orjson" -version = "3.10.6" +version = "3.10.7" description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" optional = false python-versions = ">=3.8" files = [ - {file = "orjson-3.10.6-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:fb0ee33124db6eaa517d00890fc1a55c3bfe1cf78ba4a8899d71a06f2d6ff5c7"}, - {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c1c4b53b24a4c06547ce43e5fee6ec4e0d8fe2d597f4647fc033fd205707365"}, - {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eadc8fd310edb4bdbd333374f2c8fec6794bbbae99b592f448d8214a5e4050c0"}, - {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:61272a5aec2b2661f4fa2b37c907ce9701e821b2c1285d5c3ab0207ebd358d38"}, - {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57985ee7e91d6214c837936dc1608f40f330a6b88bb13f5a57ce5257807da143"}, - {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:633a3b31d9d7c9f02d49c4ab4d0a86065c4a6f6adc297d63d272e043472acab5"}, - {file = "orjson-3.10.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:1c680b269d33ec444afe2bdc647c9eb73166fa47a16d9a75ee56a374f4a45f43"}, - {file = "orjson-3.10.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f759503a97a6ace19e55461395ab0d618b5a117e8d0fbb20e70cfd68a47327f2"}, - {file = "orjson-3.10.6-cp310-none-win32.whl", hash = "sha256:95a0cce17f969fb5391762e5719575217bd10ac5a189d1979442ee54456393f3"}, - {file = "orjson-3.10.6-cp310-none-win_amd64.whl", hash = "sha256:df25d9271270ba2133cc88ee83c318372bdc0f2cd6f32e7a450809a111efc45c"}, - {file = "orjson-3.10.6-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:b1ec490e10d2a77c345def52599311849fc063ae0e67cf4f84528073152bb2ba"}, - {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:55d43d3feb8f19d07e9f01e5b9be4f28801cf7c60d0fa0d279951b18fae1932b"}, - {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac3045267e98fe749408eee1593a142e02357c5c99be0802185ef2170086a863"}, - {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c27bc6a28ae95923350ab382c57113abd38f3928af3c80be6f2ba7eb8d8db0b0"}, - {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d27456491ca79532d11e507cadca37fb8c9324a3976294f68fb1eff2dc6ced5a"}, - {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05ac3d3916023745aa3b3b388e91b9166be1ca02b7c7e41045da6d12985685f0"}, - {file = "orjson-3.10.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1335d4ef59ab85cab66fe73fd7a4e881c298ee7f63ede918b7faa1b27cbe5212"}, - {file = "orjson-3.10.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4bbc6d0af24c1575edc79994c20e1b29e6fb3c6a570371306db0993ecf144dc5"}, - {file = "orjson-3.10.6-cp311-none-win32.whl", hash = "sha256:450e39ab1f7694465060a0550b3f6d328d20297bf2e06aa947b97c21e5241fbd"}, - {file = "orjson-3.10.6-cp311-none-win_amd64.whl", hash = "sha256:227df19441372610b20e05bdb906e1742ec2ad7a66ac8350dcfd29a63014a83b"}, - {file = "orjson-3.10.6-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:ea2977b21f8d5d9b758bb3f344a75e55ca78e3ff85595d248eee813ae23ecdfb"}, - {file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b6f3d167d13a16ed263b52dbfedff52c962bfd3d270b46b7518365bcc2121eed"}, - {file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f710f346e4c44a4e8bdf23daa974faede58f83334289df80bc9cd12fe82573c7"}, - {file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7275664f84e027dcb1ad5200b8b18373e9c669b2a9ec33d410c40f5ccf4b257e"}, - {file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0943e4c701196b23c240b3d10ed8ecd674f03089198cf503105b474a4f77f21f"}, - {file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:446dee5a491b5bc7d8f825d80d9637e7af43f86a331207b9c9610e2f93fee22a"}, - {file = "orjson-3.10.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:64c81456d2a050d380786413786b057983892db105516639cb5d3ee3c7fd5148"}, - {file = "orjson-3.10.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:960db0e31c4e52fa0fc3ecbaea5b2d3b58f379e32a95ae6b0ebeaa25b93dfd34"}, - {file = "orjson-3.10.6-cp312-none-win32.whl", hash = "sha256:a6ea7afb5b30b2317e0bee03c8d34c8181bc5a36f2afd4d0952f378972c4efd5"}, - {file = "orjson-3.10.6-cp312-none-win_amd64.whl", hash = "sha256:874ce88264b7e655dde4aeaacdc8fd772a7962faadfb41abe63e2a4861abc3dc"}, - {file = "orjson-3.10.6-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:66680eae4c4e7fc193d91cfc1353ad6d01b4801ae9b5314f17e11ba55e934183"}, - {file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:caff75b425db5ef8e8f23af93c80f072f97b4fb3afd4af44482905c9f588da28"}, - {file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3722fddb821b6036fd2a3c814f6bd9b57a89dc6337b9924ecd614ebce3271394"}, - {file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:c2c116072a8533f2fec435fde4d134610f806bdac20188c7bd2081f3e9e0133f"}, - {file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6eeb13218c8cf34c61912e9df2de2853f1d009de0e46ea09ccdf3d757896af0a"}, - {file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:965a916373382674e323c957d560b953d81d7a8603fbeee26f7b8248638bd48b"}, - {file = "orjson-3.10.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:03c95484d53ed8e479cade8628c9cea00fd9d67f5554764a1110e0d5aa2de96e"}, - {file = "orjson-3.10.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:e060748a04cccf1e0a6f2358dffea9c080b849a4a68c28b1b907f272b5127e9b"}, - {file = "orjson-3.10.6-cp38-none-win32.whl", hash = "sha256:738dbe3ef909c4b019d69afc19caf6b5ed0e2f1c786b5d6215fbb7539246e4c6"}, - {file = "orjson-3.10.6-cp38-none-win_amd64.whl", hash = "sha256:d40f839dddf6a7d77114fe6b8a70218556408c71d4d6e29413bb5f150a692ff7"}, - {file = "orjson-3.10.6-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:697a35a083c4f834807a6232b3e62c8b280f7a44ad0b759fd4dce748951e70db"}, - {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd502f96bf5ea9a61cbc0b2b5900d0dd68aa0da197179042bdd2be67e51a1e4b"}, - {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f215789fb1667cdc874c1b8af6a84dc939fd802bf293a8334fce185c79cd359b"}, - {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2debd8ddce948a8c0938c8c93ade191d2f4ba4649a54302a7da905a81f00b56"}, - {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5410111d7b6681d4b0d65e0f58a13be588d01b473822483f77f513c7f93bd3b2"}, - {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb1f28a137337fdc18384079fa5726810681055b32b92253fa15ae5656e1dddb"}, - {file = "orjson-3.10.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:bf2fbbce5fe7cd1aa177ea3eab2b8e6a6bc6e8592e4279ed3db2d62e57c0e1b2"}, - {file = "orjson-3.10.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:79b9b9e33bd4c517445a62b90ca0cc279b0f1f3970655c3df9e608bc3f91741a"}, - {file = "orjson-3.10.6-cp39-none-win32.whl", hash = "sha256:30b0a09a2014e621b1adf66a4f705f0809358350a757508ee80209b2d8dae219"}, - {file = "orjson-3.10.6-cp39-none-win_amd64.whl", hash = "sha256:49e3bc615652617d463069f91b867a4458114c5b104e13b7ae6872e5f79d0844"}, - {file = "orjson-3.10.6.tar.gz", hash = "sha256:e54b63d0a7c6c54a5f5f726bc93a2078111ef060fec4ecbf34c5db800ca3b3a7"}, + {file = "orjson-3.10.7-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:74f4544f5a6405b90da8ea724d15ac9c36da4d72a738c64685003337401f5c12"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34a566f22c28222b08875b18b0dfbf8a947e69df21a9ed5c51a6bf91cfb944ac"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bf6ba8ebc8ef5792e2337fb0419f8009729335bb400ece005606336b7fd7bab7"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac7cf6222b29fbda9e3a472b41e6a5538b48f2c8f99261eecd60aafbdb60690c"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de817e2f5fc75a9e7dd350c4b0f54617b280e26d1631811a43e7e968fa71e3e9"}, + {file = 
"orjson-3.10.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:348bdd16b32556cf8d7257b17cf2bdb7ab7976af4af41ebe79f9796c218f7e91"}, + {file = "orjson-3.10.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:479fd0844ddc3ca77e0fd99644c7fe2de8e8be1efcd57705b5c92e5186e8a250"}, + {file = "orjson-3.10.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fdf5197a21dd660cf19dfd2a3ce79574588f8f5e2dbf21bda9ee2d2b46924d84"}, + {file = "orjson-3.10.7-cp310-none-win32.whl", hash = "sha256:d374d36726746c81a49f3ff8daa2898dccab6596864ebe43d50733275c629175"}, + {file = "orjson-3.10.7-cp310-none-win_amd64.whl", hash = "sha256:cb61938aec8b0ffb6eef484d480188a1777e67b05d58e41b435c74b9d84e0b9c"}, + {file = "orjson-3.10.7-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:7db8539039698ddfb9a524b4dd19508256107568cdad24f3682d5773e60504a2"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:480f455222cb7a1dea35c57a67578848537d2602b46c464472c995297117fa09"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8a9c9b168b3a19e37fe2778c0003359f07822c90fdff8f98d9d2a91b3144d8e0"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8de062de550f63185e4c1c54151bdddfc5625e37daf0aa1e75d2a1293e3b7d9a"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6b0dd04483499d1de9c8f6203f8975caf17a6000b9c0c54630cef02e44ee624e"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b58d3795dafa334fc8fd46f7c5dc013e6ad06fd5b9a4cc98cb1456e7d3558bd6"}, + {file = "orjson-3.10.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:33cfb96c24034a878d83d1a9415799a73dc77480e6c40417e5dda0710d559ee6"}, + {file = "orjson-3.10.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e724cebe1fadc2b23c6f7415bad5ee6239e00a69f30ee423f319c6af70e2a5c0"}, + {file = "orjson-3.10.7-cp311-none-win32.whl", hash = "sha256:82763b46053727a7168d29c772ed5c870fdae2f61aa8a25994c7984a19b1021f"}, + {file = "orjson-3.10.7-cp311-none-win_amd64.whl", hash = "sha256:eb8d384a24778abf29afb8e41d68fdd9a156cf6e5390c04cc07bbc24b89e98b5"}, + {file = "orjson-3.10.7-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:44a96f2d4c3af51bfac6bc4ef7b182aa33f2f054fd7f34cc0ee9a320d051d41f"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76ac14cd57df0572453543f8f2575e2d01ae9e790c21f57627803f5e79b0d3c3"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bdbb61dcc365dd9be94e8f7df91975edc9364d6a78c8f7adb69c1cdff318ec93"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b48b3db6bb6e0a08fa8c83b47bc169623f801e5cc4f24442ab2b6617da3b5313"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:23820a1563a1d386414fef15c249040042b8e5d07b40ab3fe3efbfbbcbcb8864"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0c6a008e91d10a2564edbb6ee5069a9e66df3fbe11c9a005cb411f441fd2c09"}, + {file = "orjson-3.10.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d352ee8ac1926d6193f602cbe36b1643bbd1bbcb25e3c1a657a4390f3000c9a5"}, + {file = 
"orjson-3.10.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d2d9f990623f15c0ae7ac608103c33dfe1486d2ed974ac3f40b693bad1a22a7b"}, + {file = "orjson-3.10.7-cp312-none-win32.whl", hash = "sha256:7c4c17f8157bd520cdb7195f75ddbd31671997cbe10aee559c2d613592e7d7eb"}, + {file = "orjson-3.10.7-cp312-none-win_amd64.whl", hash = "sha256:1d9c0e733e02ada3ed6098a10a8ee0052dd55774de3d9110d29868d24b17faa1"}, + {file = "orjson-3.10.7-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:77d325ed866876c0fa6492598ec01fe30e803272a6e8b10e992288b009cbe149"}, + {file = "orjson-3.10.7-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ea2c232deedcb605e853ae1db2cc94f7390ac776743b699b50b071b02bea6fe"}, + {file = "orjson-3.10.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3dcfbede6737fdbef3ce9c37af3fb6142e8e1ebc10336daa05872bfb1d87839c"}, + {file = "orjson-3.10.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:11748c135f281203f4ee695b7f80bb1358a82a63905f9f0b794769483ea854ad"}, + {file = "orjson-3.10.7-cp313-none-win32.whl", hash = "sha256:a7e19150d215c7a13f39eb787d84db274298d3f83d85463e61d277bbd7f401d2"}, + {file = "orjson-3.10.7-cp313-none-win_amd64.whl", hash = "sha256:eef44224729e9525d5261cc8d28d6b11cafc90e6bd0be2157bde69a52ec83024"}, + {file = "orjson-3.10.7-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:6ea2b2258eff652c82652d5e0f02bd5e0463a6a52abb78e49ac288827aaa1469"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:430ee4d85841e1483d487e7b81401785a5dfd69db5de01314538f31f8fbf7ee1"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4b6146e439af4c2472c56f8540d799a67a81226e11992008cb47e1267a9b3225"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:084e537806b458911137f76097e53ce7bf5806dda33ddf6aaa66a028f8d43a23"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4829cf2195838e3f93b70fd3b4292156fc5e097aac3739859ac0dcc722b27ac0"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1193b2416cbad1a769f868b1749535d5da47626ac29445803dae7cc64b3f5c98"}, + {file = "orjson-3.10.7-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:4e6c3da13e5a57e4b3dca2de059f243ebec705857522f188f0180ae88badd354"}, + {file = "orjson-3.10.7-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c31008598424dfbe52ce8c5b47e0752dca918a4fdc4a2a32004efd9fab41d866"}, + {file = "orjson-3.10.7-cp38-none-win32.whl", hash = "sha256:7122a99831f9e7fe977dc45784d3b2edc821c172d545e6420c375e5a935f5a1c"}, + {file = "orjson-3.10.7-cp38-none-win_amd64.whl", hash = "sha256:a763bc0e58504cc803739e7df040685816145a6f3c8a589787084b54ebc9f16e"}, + {file = "orjson-3.10.7-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:e76be12658a6fa376fcd331b1ea4e58f5a06fd0220653450f0d415b8fd0fbe20"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed350d6978d28b92939bfeb1a0570c523f6170efc3f0a0ef1f1df287cd4f4960"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:144888c76f8520e39bfa121b31fd637e18d4cc2f115727865fdf9fa325b10412"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:09b2d92fd95ad2402188cf51573acde57eb269eddabaa60f69ea0d733e789fe9"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5b24a579123fa884f3a3caadaed7b75eb5715ee2b17ab5c66ac97d29b18fe57f"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591bcfe7512353bd609875ab38050efe3d55e18934e2f18950c108334b4ff"}, + {file = "orjson-3.10.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f4db56635b58cd1a200b0a23744ff44206ee6aa428185e2b6c4a65b3197abdcd"}, + {file = "orjson-3.10.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0fa5886854673222618638c6df7718ea7fe2f3f2384c452c9ccedc70b4a510a5"}, + {file = "orjson-3.10.7-cp39-none-win32.whl", hash = "sha256:8272527d08450ab16eb405f47e0f4ef0e5ff5981c3d82afe0efd25dcbef2bcd2"}, + {file = "orjson-3.10.7-cp39-none-win_amd64.whl", hash = "sha256:974683d4618c0c7dbf4f69c95a979734bf183d0658611760017f6e70a145af58"}, + {file = "orjson-3.10.7.tar.gz", hash = "sha256:75ef0640403f945f3a1f9f6400686560dbfb0fb5b16589ad62cd477043c4eee3"}, ] [[package]] @@ -1143,19 +1154,19 @@ files = [ [[package]] name = "setuptools" -version = "72.1.0" +version = "73.0.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-72.1.0-py3-none-any.whl", hash = "sha256:5a03e1860cf56bb6ef48ce186b0e557fdba433237481a9a625176c2831be15d1"}, - {file = "setuptools-72.1.0.tar.gz", hash = "sha256:8d243eff56d095e5817f796ede6ae32941278f542e0f941867cc05ae52b162ec"}, + {file = "setuptools-73.0.1-py3-none-any.whl", hash = "sha256:b208925fcb9f7af924ed2dc04708ea89791e24bde0d3020b27df0e116088b34e"}, + {file = "setuptools-73.0.1.tar.gz", hash = "sha256:d59a3e788ab7e012ab2c4baed1b376da6366883ee20d7a5fc426816e3d7b1193"}, ] [package.extras] -core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "ordered-set (>=3.1.1)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.11.*)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (<0.4)", "pytest-ruff (>=0.2.1)", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", 
"towncrier (<24.7)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.11.*)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (<0.4)", "pytest-ruff (>=0.2.1)", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] [[package]] name = "shellingham" @@ -1209,13 +1220,13 @@ uvicorn = "*" [[package]] name = "starlette" -version = "0.37.2" +version = "0.38.2" description = "The little ASGI library that shines." optional = false python-versions = ">=3.8" files = [ - {file = "starlette-0.37.2-py3-none-any.whl", hash = "sha256:6fe59f29268538e5d0d182f2791a479a0c64638e6935d1c6989e63fb2699c6ee"}, - {file = "starlette-0.37.2.tar.gz", hash = "sha256:9af890290133b79fc3db55474ade20f6220a364a0402e0b556e7cd5e1e093823"}, + {file = "starlette-0.38.2-py3-none-any.whl", hash = "sha256:4ec6a59df6bbafdab5f567754481657f7ed90dc9d69b0c9ff017907dd54faeff"}, + {file = "starlette-0.38.2.tar.gz", hash = "sha256:c7c0441065252160993a1a37cf2a73bb64d271b17303e0b0c1eb7191cfb12d75"}, ] [package.dependencies] @@ -1346,43 +1357,46 @@ standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", [[package]] name = "watchdog" -version = "4.0.1" +version = "4.0.2" description = "Filesystem events monitoring" optional = false python-versions = ">=3.8" files = [ - {file = "watchdog-4.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:da2dfdaa8006eb6a71051795856bedd97e5b03e57da96f98e375682c48850645"}, - {file = "watchdog-4.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e93f451f2dfa433d97765ca2634628b789b49ba8b504fdde5837cdcf25fdb53b"}, - {file = "watchdog-4.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ef0107bbb6a55f5be727cfc2ef945d5676b97bffb8425650dadbb184be9f9a2b"}, - {file = "watchdog-4.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:17e32f147d8bf9657e0922c0940bcde863b894cd871dbb694beb6704cfbd2fb5"}, - {file = "watchdog-4.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:03e70d2df2258fb6cb0e95bbdbe06c16e608af94a3ffbd2b90c3f1e83eb10767"}, - {file = "watchdog-4.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:123587af84260c991dc5f62a6e7ef3d1c57dfddc99faacee508c71d287248459"}, - {file = "watchdog-4.0.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:093b23e6906a8b97051191a4a0c73a77ecc958121d42346274c6af6520dec175"}, - {file = "watchdog-4.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:611be3904f9843f0529c35a3ff3fd617449463cb4b73b1633950b3d97fa4bfb7"}, - {file = "watchdog-4.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:62c613ad689ddcb11707f030e722fa929f322ef7e4f18f5335d2b73c61a85c28"}, - {file = "watchdog-4.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d4925e4bf7b9bddd1c3de13c9b8a2cdb89a468f640e66fbfabaf735bd85b3e35"}, - {file = "watchdog-4.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cad0bbd66cd59fc474b4a4376bc5ac3fc698723510cbb64091c2a793b18654db"}, - {file = "watchdog-4.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a3c2c317a8fb53e5b3d25790553796105501a235343f5d2bf23bb8649c2c8709"}, - {file = "watchdog-4.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:c9904904b6564d4ee8a1ed820db76185a3c96e05560c776c79a6ce5ab71888ba"}, - {file = "watchdog-4.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:667f3c579e813fcbad1b784db7a1aaa96524bed53437e119f6a2f5de4db04235"}, - {file = "watchdog-4.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d10a681c9a1d5a77e75c48a3b8e1a9f2ae2928eda463e8d33660437705659682"}, - {file = "watchdog-4.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0144c0ea9997b92615af1d94afc0c217e07ce2c14912c7b1a5731776329fcfc7"}, - {file = "watchdog-4.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:998d2be6976a0ee3a81fb8e2777900c28641fb5bfbd0c84717d89bca0addcdc5"}, - {file = "watchdog-4.0.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e7921319fe4430b11278d924ef66d4daa469fafb1da679a2e48c935fa27af193"}, - {file = "watchdog-4.0.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:f0de0f284248ab40188f23380b03b59126d1479cd59940f2a34f8852db710625"}, - {file = "watchdog-4.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bca36be5707e81b9e6ce3208d92d95540d4ca244c006b61511753583c81c70dd"}, - {file = "watchdog-4.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ab998f567ebdf6b1da7dc1e5accfaa7c6992244629c0fdaef062f43249bd8dee"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_aarch64.whl", hash = "sha256:dddba7ca1c807045323b6af4ff80f5ddc4d654c8bce8317dde1bd96b128ed253"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_armv7l.whl", hash = "sha256:4513ec234c68b14d4161440e07f995f231be21a09329051e67a2118a7a612d2d"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_i686.whl", hash = "sha256:4107ac5ab936a63952dea2a46a734a23230aa2f6f9db1291bf171dac3ebd53c6"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_ppc64.whl", hash = "sha256:6e8c70d2cd745daec2a08734d9f63092b793ad97612470a0ee4cbb8f5f705c57"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f27279d060e2ab24c0aa98363ff906d2386aa6c4dc2f1a374655d4e02a6c5e5e"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_s390x.whl", hash = "sha256:f8affdf3c0f0466e69f5b3917cdd042f89c8c63aebdb9f7c078996f607cdb0f5"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:ac7041b385f04c047fcc2951dc001671dee1b7e0615cde772e84b01fbf68ee84"}, - {file = "watchdog-4.0.1-py3-none-win32.whl", hash = "sha256:206afc3d964f9a233e6ad34618ec60b9837d0582b500b63687e34011e15bb429"}, - {file = "watchdog-4.0.1-py3-none-win_amd64.whl", hash = "sha256:7577b3c43e5909623149f76b099ac49a1a01ca4e167d1785c76eb52fa585745a"}, - {file = "watchdog-4.0.1-py3-none-win_ia64.whl", hash = "sha256:d7b9f5f3299e8dd230880b6c55504a1f69cf1e4316275d1b215ebdd8187ec88d"}, - {file = "watchdog-4.0.1.tar.gz", hash = "sha256:eebaacf674fa25511e8867028d281e602ee6500045b57f43b08778082f7f8b44"}, + {file = "watchdog-4.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ede7f010f2239b97cc79e6cb3c249e72962404ae3865860855d5cbe708b0fd22"}, + {file = "watchdog-4.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a2cffa171445b0efa0726c561eca9a27d00a1f2b83846dbd5a4f639c4f8ca8e1"}, + {file = "watchdog-4.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c50f148b31b03fbadd6d0b5980e38b558046b127dc483e5e4505fcef250f9503"}, + {file = "watchdog-4.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7c7d4bf585ad501c5f6c980e7be9c4f15604c7cc150e942d82083b31a7548930"}, + {file = "watchdog-4.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:914285126ad0b6eb2258bbbcb7b288d9dfd655ae88fa28945be05a7b475a800b"}, + {file = 
"watchdog-4.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:984306dc4720da5498b16fc037b36ac443816125a3705dfde4fd90652d8028ef"}, + {file = "watchdog-4.0.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1cdcfd8142f604630deef34722d695fb455d04ab7cfe9963055df1fc69e6727a"}, + {file = "watchdog-4.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d7ab624ff2f663f98cd03c8b7eedc09375a911794dfea6bf2a359fcc266bff29"}, + {file = "watchdog-4.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:132937547a716027bd5714383dfc40dc66c26769f1ce8a72a859d6a48f371f3a"}, + {file = "watchdog-4.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:cd67c7df93eb58f360c43802acc945fa8da70c675b6fa37a241e17ca698ca49b"}, + {file = "watchdog-4.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:bcfd02377be80ef3b6bc4ce481ef3959640458d6feaae0bd43dd90a43da90a7d"}, + {file = "watchdog-4.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:980b71510f59c884d684b3663d46e7a14b457c9611c481e5cef08f4dd022eed7"}, + {file = "watchdog-4.0.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:aa160781cafff2719b663c8a506156e9289d111d80f3387cf3af49cedee1f040"}, + {file = "watchdog-4.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f6ee8dedd255087bc7fe82adf046f0b75479b989185fb0bdf9a98b612170eac7"}, + {file = "watchdog-4.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0b4359067d30d5b864e09c8597b112fe0a0a59321a0f331498b013fb097406b4"}, + {file = "watchdog-4.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:770eef5372f146997638d737c9a3c597a3b41037cfbc5c41538fc27c09c3a3f9"}, + {file = "watchdog-4.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eeea812f38536a0aa859972d50c76e37f4456474b02bd93674d1947cf1e39578"}, + {file = "watchdog-4.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b2c45f6e1e57ebb4687690c05bc3a2c1fb6ab260550c4290b8abb1335e0fd08b"}, + {file = "watchdog-4.0.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:10b6683df70d340ac3279eff0b2766813f00f35a1d37515d2c99959ada8f05fa"}, + {file = "watchdog-4.0.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f7c739888c20f99824f7aa9d31ac8a97353e22d0c0e54703a547a218f6637eb3"}, + {file = "watchdog-4.0.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c100d09ac72a8a08ddbf0629ddfa0b8ee41740f9051429baa8e31bb903ad7508"}, + {file = "watchdog-4.0.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:f5315a8c8dd6dd9425b974515081fc0aadca1d1d61e078d2246509fd756141ee"}, + {file = "watchdog-4.0.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:2d468028a77b42cc685ed694a7a550a8d1771bb05193ba7b24006b8241a571a1"}, + {file = "watchdog-4.0.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f15edcae3830ff20e55d1f4e743e92970c847bcddc8b7509bcd172aa04de506e"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_aarch64.whl", hash = "sha256:936acba76d636f70db8f3c66e76aa6cb5136a936fc2a5088b9ce1c7a3508fc83"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_armv7l.whl", hash = "sha256:e252f8ca942a870f38cf785aef420285431311652d871409a64e2a0a52a2174c"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_i686.whl", hash = "sha256:0e83619a2d5d436a7e58a1aea957a3c1ccbf9782c43c0b4fed80580e5e4acd1a"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_ppc64.whl", hash = "sha256:88456d65f207b39f1981bf772e473799fcdc10801062c36fd5ad9f9d1d463a73"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:32be97f3b75693a93c683787a87a0dc8db98bb84701539954eef991fb35f5fbc"}, + {file = 
"watchdog-4.0.2-py3-none-manylinux2014_s390x.whl", hash = "sha256:c82253cfc9be68e3e49282831afad2c1f6593af80c0daf1287f6a92657986757"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_x86_64.whl", hash = "sha256:c0b14488bd336c5b1845cee83d3e631a1f8b4e9c5091ec539406e4a324f882d8"}, + {file = "watchdog-4.0.2-py3-none-win32.whl", hash = "sha256:0d8a7e523ef03757a5aa29f591437d64d0d894635f8a50f370fe37f913ce4e19"}, + {file = "watchdog-4.0.2-py3-none-win_amd64.whl", hash = "sha256:c344453ef3bf875a535b0488e3ad28e341adbd5a9ffb0f7d62cefacc8824ef2b"}, + {file = "watchdog-4.0.2-py3-none-win_ia64.whl", hash = "sha256:baececaa8edff42cd16558a639a9b0ddf425f93d892e8392a56bf904f5eff22c"}, + {file = "watchdog-4.0.2.tar.gz", hash = "sha256:b4dfbb6c49221be4535623ea4474a4d6ee0a9cef4a80b20c28db4d858b64e270"}, ] [package.extras] @@ -1404,13 +1418,13 @@ test = ["pytest (>=6.0.0)", "setuptools (>=65)"] [[package]] name = "zipp" -version = "3.19.2" +version = "3.20.0" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"}, - {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"}, + {file = "zipp-3.20.0-py3-none-any.whl", hash = "sha256:58da6168be89f0be59beb194da1250516fdaa062ccebd30127ac65d30045e10d"}, + {file = "zipp-3.20.0.tar.gz", hash = "sha256:0145e43d89664cfe1a2e533adc75adafed82fe2da404b4bbb6b026c0157bdb31"}, ] [package.extras] diff --git a/libs/cli/pyproject.toml b/libs/cli/pyproject.toml index 4ebba80bb6e5b..54bce81532e04 100644 --- a/libs/cli/pyproject.toml +++ b/libs/cli/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "langchain-cli" -version = "0.0.29" +version = "0.0.30" description = "CLI for interacting with LangChain" authors = ["Erick Friis "] readme = "README.md" From 624e0747b987ae7acbba96cf7da941af843763d0 Mon Sep 17 00:00:00 2001 From: Leonid Ganeline Date: Thu, 22 Aug 2024 10:21:54 -0700 Subject: [PATCH 67/80] docs: `integrations` reference updates 10 (#25556) Added missed provider pages. Added descriptions, links. --- docs/docs/integrations/platforms/google.mdx | 5 ++++ .../docs/integrations/platforms/microsoft.mdx | 16 ++++++++++ docs/docs/integrations/providers/connery.mdx | 28 ++++++++++++++++++ .../integrations/providers/dashvector.mdx | 17 ++++++++++- .../integrations/providers/dataforseo.mdx | 9 +++++- docs/docs/integrations/providers/dingo.mdx | 18 ++++++++++-- docs/docs/integrations/providers/docarray.mdx | 9 +++++- docs/docs/integrations/providers/pandas.mdx | 29 +++++++++++++++++++ 8 files changed, 125 insertions(+), 6 deletions(-) create mode 100644 docs/docs/integrations/providers/connery.mdx create mode 100644 docs/docs/integrations/providers/pandas.mdx diff --git a/docs/docs/integrations/platforms/google.mdx b/docs/docs/integrations/platforms/google.mdx index 19da4679c5d5b..bc8d17d25744f 100644 --- a/docs/docs/integrations/platforms/google.mdx +++ b/docs/docs/integrations/platforms/google.mdx @@ -625,6 +625,7 @@ from langchain.retrievers import GoogleVertexAISearchRetriever > from Google Cloud allows enterprises to search, store, govern, and manage documents and their AI-extracted > data and metadata in a single platform. +Note: `GoogleDocumentAIWarehouseRetriever` is deprecated, use `DocumentAIWarehouseRetriever` (see below). 
```python from langchain.retrievers import GoogleDocumentAIWarehouseRetriever docai_wh_retriever = GoogleDocumentAIWarehouseRetriever( @@ -636,6 +637,10 @@ documents = docai_wh_retriever.invoke( ) ``` +```python +from langchain_google_community.documentai_warehouse import DocumentAIWarehouseRetriever +``` + ## Tools ### Text-to-Speech diff --git a/docs/docs/integrations/platforms/microsoft.mdx b/docs/docs/integrations/platforms/microsoft.mdx index a0d6807ed7793..ed282710ccc41 100644 --- a/docs/docs/integrations/platforms/microsoft.mdx +++ b/docs/docs/integrations/platforms/microsoft.mdx @@ -466,6 +466,22 @@ See a [usage example](/docs/integrations/tools/playwright). from langchain_community.agent_toolkits import PlayWrightBrowserToolkit ``` +#### PlayWright Browser individual tools + +You can use individual tools from the PlayWright Browser Toolkit. + +```python +from langchain_community.tools.playwright import ClickTool +from langchain_community.tools.playwright import CurrentWebPageTool +from langchain_community.tools.playwright import ExtractHyperlinksTool +from langchain_community.tools.playwright import ExtractTextTool +from langchain_community.tools.playwright import GetElementsTool +from langchain_community.tools.playwright import NavigateTool +from langchain_community.tools.playwright import NavigateBackTool +``` + + +```python ## Graphs ### Azure Cosmos DB for Apache Gremlin diff --git a/docs/docs/integrations/providers/connery.mdx b/docs/docs/integrations/providers/connery.mdx new file mode 100644 index 0000000000000..36684a97fa0e9 --- /dev/null +++ b/docs/docs/integrations/providers/connery.mdx @@ -0,0 +1,28 @@ +# Connery + +>[Connery SDK](https://github.com/connery-io/connery-sdk) is an NPM package that +> includes both an SDK and a CLI, designed for the development of plugins and actions. +> +>The CLI automates many things in the development process. The SDK +> offers a JavaScript API for defining plugins and actions and packaging them +> into a plugin server with a standardized REST API generated from the metadata. +> The plugin server handles authorization, input validation, and logging. +> So you can focus on the logic of your actions. +> +> See the use cases and examples in the [Connery SDK documentation](https://sdk.connery.io/docs/use-cases/) + +## Toolkit + +See [usage example](/docs/integrations/tools/connery). + +```python +from langchain_community.agent_toolkits.connery import ConneryToolkit +``` + +## Tools + +### ConneryAction + +```python +from langchain_community.tools.connery import ConneryService +``` diff --git a/docs/docs/integrations/providers/dashvector.mdx b/docs/docs/integrations/providers/dashvector.mdx index b18fca590b6a5..b7ded751ddf7d 100644 --- a/docs/docs/integrations/providers/dashvector.mdx +++ b/docs/docs/integrations/providers/dashvector.mdx @@ -6,12 +6,27 @@ This document demonstrates to leverage DashVector within the LangChain ecosystem It is broken into two parts: installation and setup, and then references to specific DashVector wrappers. ## Installation and Setup + + Install the Python SDK: + ```bash pip install dashvector ``` -## VectorStore +You must have an API key. Here are the [installation instructions](https://help.aliyun.com/document_detail/2510223.html). + + +## Embedding models + +```python +from langchain_community.embeddings import DashScopeEmbeddings +``` + +See the [use example](/docs/integrations/vectorstores/dashvector). 
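As an illustrative sketch only (assuming the `dashscope` package is installed and a `DASHSCOPE_API_KEY` is available in the environment; the model name shown is simply the library default), a query can be embedded like this:

```python
from langchain_community.embeddings import DashScopeEmbeddings

# Assumes `pip install dashscope` and DASHSCOPE_API_KEY set in the environment.
embeddings = DashScopeEmbeddings(model="text-embedding-v1")

# Embed a single query string; returns a list of floats suitable for storing in DashVector.
query_vector = embeddings.embed_query("What is DashVector?")
```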
+ + +## Vector Store A DashVector Collection is wrapped as a familiar VectorStore for native usage within LangChain, which allows it to be readily used for various scenarios, such as semantic search or example selection. diff --git a/docs/docs/integrations/providers/dataforseo.mdx b/docs/docs/integrations/providers/dataforseo.mdx index 3be8ed0f2be17..37d8884fa4b42 100644 --- a/docs/docs/integrations/providers/dataforseo.mdx +++ b/docs/docs/integrations/providers/dataforseo.mdx @@ -19,7 +19,7 @@ os.environ["DATAFORSEO_PASSWORD"] = "your_password" ## Utility -The DataForSEO utility wraps the API. To import this utility, use: +The `DataForSEO` utility wraps the API. To import this utility, use: ```python from langchain_community.utilities.dataforseo_api_search import DataForSeoAPIWrapper @@ -36,6 +36,13 @@ from langchain.agents import load_tools tools = load_tools(["dataforseo-api-search"]) ``` +This will load the following tools: + +```python +from langchain_community.tools import DataForSeoAPISearchRun +from langchain_community.tools import DataForSeoAPISearchResults +``` + ## Example usage ```python diff --git a/docs/docs/integrations/providers/dingo.mdx b/docs/docs/integrations/providers/dingo.mdx index be0c9f83faba3..b12a6a72cbc6c 100644 --- a/docs/docs/integrations/providers/dingo.mdx +++ b/docs/docs/integrations/providers/dingo.mdx @@ -1,10 +1,21 @@ # DingoDB -This page covers how to use the DingoDB ecosystem within LangChain. -It is broken into two parts: installation and setup, and then references to specific DingoDB wrappers. +>[DingoDB](https://github.com/dingodb) is a distributed multi-modal vector +> database. It combines the features of a data lake and a vector database, +> allowing for the storage of any type of data (key-value, PDF, audio, +> video, etc.) regardless of its size. Utilizing DingoDB, you can construct +> your own Vector Ocean (the next-generation data architecture following data +> warehouse and data lake). This enables +> the analysis of both structured and unstructured data through +> a singular SQL with exceptionally low latency in real time. ## Installation and Setup -- Install the Python SDK with `pip install dingodb` + +Install the Python SDK + +```bash +pip install dingodb +``` ## VectorStore @@ -12,6 +23,7 @@ There exists a wrapper around DingoDB indexes, allowing you to use it as a vecto whether for semantic search or example selection. To import this vectorstore: + ```python from langchain_community.vectorstores import Dingo ``` diff --git a/docs/docs/integrations/providers/docarray.mdx b/docs/docs/integrations/providers/docarray.mdx index 5895fa30f798c..d1d41a19834d1 100644 --- a/docs/docs/integrations/providers/docarray.mdx +++ b/docs/docs/integrations/providers/docarray.mdx @@ -20,7 +20,7 @@ LangChain provides an access to the `In-memory` and `HNSW` vector stores from th See a [usage example](/docs/integrations/vectorstores/docarray_hnsw). ```python -from langchain_community.vectorstores DocArrayHnswSearch +from langchain_community.vectorstores import DocArrayHnswSearch ``` See a [usage example](/docs/integrations/vectorstores/docarray_in_memory). @@ -28,3 +28,10 @@ See a [usage example](/docs/integrations/vectorstores/docarray_in_memory). from langchain_community.vectorstores DocArrayInMemorySearch ``` +## Retriever + +See a [usage example](/docs/integrations/retrievers/docarray_retriever). 
+ +```python +from langchain_community.retrievers import DocArrayRetriever +``` diff --git a/docs/docs/integrations/providers/pandas.mdx b/docs/docs/integrations/providers/pandas.mdx new file mode 100644 index 0000000000000..15519b0b0f792 --- /dev/null +++ b/docs/docs/integrations/providers/pandas.mdx @@ -0,0 +1,29 @@ +# Pandas + +>[pandas](https://pandas.pydata.org) is a fast, powerful, flexible and easy to use open source data analysis and manipulation tool, +built on top of the `Python` programming language. + +## Installation and Setup + +Install the `pandas` package using `pip`: + +```bash +pip install pandas +``` + + +## Document loader + +See a [usage example](/docs/integrations/document_loaders/pandas_dataframe). + +```python +from langchain_community.document_loaders import DataFrameLoader +``` + +## Toolkit + +See a [usage example](/docs/integrations/tools/pandas). + +```python +from langchain_experimental.agents.agent_toolkits import create_pandas_dataframe_agent +``` From 3da752c7bb3eb0816366d909f250daeceded5eed Mon Sep 17 00:00:00 2001 From: Maurits Bos <35366788+MbBrainz@users.noreply.github.com> Date: Thu, 22 Aug 2024 19:22:50 +0200 Subject: [PATCH 68/80] Update pyproject.toml of package`openai-functions-agent-gmail` to prevent `ModuleOrPackageNotFound` error (#25597) I was trying to add this package using langchain-cli: `langchain app add openai-functions-agent-gmail`, but when then try to build the whole project using poetry or pip, it fails with the following error:`poetry.core.masonry.utils.module.ModuleOrPackageNotFound: No file/folder found for package openai-functions-agent-gmail` This was fixed by modifying the pyproject.toml as in this commit Thank you for contributing to LangChain! - [ ] **PR title**: "package: description" - Where "package" is whichever of langchain, community, core, experimental, etc. is being modified. Use "docs: ..." for purely docs changes, "templates: ..." for template changes, "infra: ..." for CI changes. - Example: "community: add foobar LLM" - [ ] **PR message**: ***Delete this entire checklist*** and replace with - **Description:** a description of the change - **Issue:** the issue # it fixes, if applicable - **Dependencies:** any dependencies required for this change - **Twitter handle:** if your PR gets announced, and you'd like a mention, we'll gladly shout you out! - [ ] **Add tests and docs**: If you're adding a new integration, please include 1. a test for the integration, preferably unit tests that do not rely on network access, 2. an example notebook showing its use. It lives in `docs/docs/integrations` directory. - [ ] **Lint and test**: Run `make format`, `make lint` and `make test` from the root of the package(s) you've modified. See contribution guidelines for more: https://python.langchain.com/docs/contributing/ Additional guidelines: - Make sure optional dependencies are imported within a function. - Please do not add dependencies to pyproject.toml files (even optional ones) unless they are required for unit tests. - Most PRs should not touch more than one package. - Changes should be backwards compatible. - If you are adding something to community, do not re-import it in langchain. If no one reviews your PR within a few days, please @-mention one of baskaryan, efriis, eyurtsev, ccurme, vbarda, hwchase17. 
--- templates/openai-functions-agent-gmail/pyproject.toml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/templates/openai-functions-agent-gmail/pyproject.toml b/templates/openai-functions-agent-gmail/pyproject.toml index a8bd251bf97bd..c37a7a1a14254 100644 --- a/templates/openai-functions-agent-gmail/pyproject.toml +++ b/templates/openai-functions-agent-gmail/pyproject.toml @@ -6,6 +6,9 @@ authors = [ "Lance Martin ", ] readme = "README.md" +packages = [ + { include = "openai-functions-agent" }, +] [tool.poetry.dependencies] python = ">=3.8.1,<4.0" From d886f4e107959dff4bd234f4f87b51dede06a8d5 Mon Sep 17 00:00:00 2001 From: Leonid Ganeline Date: Thu, 22 Aug 2024 10:25:41 -0700 Subject: [PATCH 69/80] docs: `integrations` reference update 9 (#25511) Added missed provider pages. Added missed references and descriptions. --- docs/docs/integrations/platforms/google.mdx | 12 +++++ docs/docs/integrations/providers/clarifai.mdx | 21 ++++---- docs/docs/integrations/providers/clickup.mdx | 20 +++++++ .../integrations/providers/cloudflare.mdx | 7 +++ docs/docs/integrations/providers/clova.mdx | 14 +++++ .../integrations/providers/cogniswitch.mdx | 53 +++++++++++++++++++ 6 files changed, 117 insertions(+), 10 deletions(-) create mode 100644 docs/docs/integrations/providers/clickup.mdx create mode 100644 docs/docs/integrations/providers/clova.mdx create mode 100644 docs/docs/integrations/providers/cogniswitch.mdx diff --git a/docs/docs/integrations/platforms/google.mdx b/docs/docs/integrations/platforms/google.mdx index bc8d17d25744f..39e1a866320ff 100644 --- a/docs/docs/integrations/platforms/google.mdx +++ b/docs/docs/integrations/platforms/google.mdx @@ -269,6 +269,18 @@ See a [usage example](/docs/integrations/document_loaders/google_cloud_storage_f from langchain_google_community import GCSFileLoader ``` +### Cloud Vision loader + +Install the python package: + +```bash +pip install langchain-google-community[vision] +``` + +```python +from langchain_google_community.vision import CloudVisionLoader +``` + ### El Carro for Oracle Workloads > Google [El Carro Oracle Operator](https://github.com/GoogleCloudPlatform/elcarro-oracle-operator) diff --git a/docs/docs/integrations/providers/clarifai.mdx b/docs/docs/integrations/providers/clarifai.mdx index 945dcae0f1691..e783833255490 100644 --- a/docs/docs/integrations/providers/clarifai.mdx +++ b/docs/docs/integrations/providers/clarifai.mdx @@ -1,6 +1,11 @@ # Clarifai >[Clarifai](https://clarifai.com) is one of first deep learning platforms having been founded in 2013. Clarifai provides an AI platform with the full AI lifecycle for data exploration, data labeling, model training, evaluation and inference around images, video, text and audio data. In the LangChain ecosystem, as far as we're aware, Clarifai is the only provider that supports LLMs, embeddings and a vector store in one production scale platform, making it an excellent choice to operationalize your LangChain implementations. +> +> `Clarifai` provides 1,000s of AI models for many different use cases. You can [explore them here](https://clarifai.com/explore) to find the one most suited for your use case. These models include those created by other providers such as OpenAI, Anthropic, Cohere, AI21, etc. as well as state of the art from open source such as Falcon, InstructorXL, etc. so that you build the best in AI into your products. You'll find these organized by the creator's user_id and into projects we call applications denoted by their app_id. 
Those IDs will be needed in additional to the model_id and optionally the version_id, so make note of all these IDs once you found the best model for your use case! +> +>Also note that given there are many models for images, video, text and audio understanding, you can build some interested AI agents that utilize the variety of AI models as experts to understand those data types. + ## Installation and Setup - Install the Python SDK: @@ -10,13 +15,7 @@ pip install clarifai [Sign-up](https://clarifai.com/signup) for a Clarifai account, then get a personal access token to access the Clarifai API from your [security settings](https://clarifai.com/settings/security) and set it as an environment variable (`CLARIFAI_PAT`). -## Models - -Clarifai provides 1,000s of AI models for many different use cases. You can [explore them here](https://clarifai.com/explore) to find the one most suited for your use case. These models include those created by other providers such as OpenAI, Anthropic, Cohere, AI21, etc. as well as state of the art from open source such as Falcon, InstructorXL, etc. so that you build the best in AI into your products. You'll find these organized by the creator's user_id and into projects we call applications denoted by their app_id. Those IDs will be needed in additional to the model_id and optionally the version_id, so make note of all these IDs once you found the best model for your use case! - -Also note that given there are many models for images, video, text and audio understanding, you can build some interested AI agents that utilize the variety of AI models as experts to understand those data types. - -### LLMs +## LLMs To find the selection of LLMs in the Clarifai platform you can select the text to text model type [here](https://clarifai.com/explore/models?filterData=%5B%7B%22field%22%3A%22model_type_id%22%2C%22value%22%3A%5B%22text-to-text%22%5D%7D%5D&page=1&perPage=24). @@ -28,16 +27,18 @@ llm = Clarifai(pat=CLARIFAI_PAT, user_id=USER_ID, app_id=APP_ID, model_id=MODEL_ For more details, the docs on the Clarifai LLM wrapper provide a [detailed walkthrough](/docs/integrations/llms/clarifai). -### Text Embedding Models +## Embedding Models -To find the selection of text embeddings models in the Clarifai platform you can select the text to embedding model type [here](https://clarifai.com/explore/models?page=1&perPage=24&filterData=%5B%7B%22field%22%3A%22model_type_id%22%2C%22value%22%3A%5B%22text-embedder%22%5D%7D%5D). +To find the selection of embeddings models in the Clarifai platform you can select the text to embedding model type [here](https://clarifai.com/explore/models?page=1&perPage=24&filterData=%5B%7B%22field%22%3A%22model_type_id%22%2C%22value%22%3A%5B%22text-embedder%22%5D%7D%5D). There is a Clarifai Embedding model in LangChain, which you can access with: ```python from langchain_community.embeddings import ClarifaiEmbeddings embeddings = ClarifaiEmbeddings(pat=CLARIFAI_PAT, user_id=USER_ID, app_id=APP_ID, model_id=MODEL_ID) ``` -For more details, the docs on the Clarifai Embeddings wrapper provide a [detailed walkthrough](/docs/integrations/text_embedding/clarifai). + +See a [usage example](/docs/integrations/document_loaders/couchbase). 
+ ## Vectorstore diff --git a/docs/docs/integrations/providers/clickup.mdx b/docs/docs/integrations/providers/clickup.mdx new file mode 100644 index 0000000000000..256ae2cace4e6 --- /dev/null +++ b/docs/docs/integrations/providers/clickup.mdx @@ -0,0 +1,20 @@ +# ClickUp + +>[ClickUp](https://clickup.com/) is an all-in-one productivity platform that provides small and large teams across industries with flexible and customizable work management solutions, tools, and functions. +> +>It is a cloud-based project management solution for businesses of all sizes featuring communication and collaboration tools to help achieve organizational goals. + +## Installation and Setup + +1. Create a [ClickUp App](https://help.clickup.com/hc/en-us/articles/6303422883095-Create-your-own-app-with-the-ClickUp-API) +2. Follow [these steps](https://clickup.com/api/developer-portal/authentication/) to get your client_id and client_secret. + +## Toolkits + +```python +from langchain_community.agent_toolkits.clickup.toolkit import ClickupToolkit +from langchain_community.utilities.clickup import ClickupAPIWrapper +``` + +See a [usage example](/docs/integrations/tools/clickup). + diff --git a/docs/docs/integrations/providers/cloudflare.mdx b/docs/docs/integrations/providers/cloudflare.mdx index ad1223ddc15e7..d7a4e8b8bed14 100644 --- a/docs/docs/integrations/providers/cloudflare.mdx +++ b/docs/docs/integrations/providers/cloudflare.mdx @@ -8,6 +8,13 @@ > learning models, on the `Cloudflare` network, from your code via REST API. +## LLMs + +See [installation instructions and usage example](/docs/integrations/llms/cloudflare_workersai). + +```python +from langchain_community.llms.cloudflare_workersai import CloudflareWorkersAI +``` ## Embedding models diff --git a/docs/docs/integrations/providers/clova.mdx b/docs/docs/integrations/providers/clova.mdx new file mode 100644 index 0000000000000..b10aa93051136 --- /dev/null +++ b/docs/docs/integrations/providers/clova.mdx @@ -0,0 +1,14 @@ +# Clova + +>[CLOVA Studio](https://api.ncloud-docs.com/docs/ai-naver-clovastudio-summary) is a service +> of [Naver Cloud Platform](https://www.ncloud.com/) that uses `HyperCLOVA` language models, +> a hyperscale AI technology, to output phrases generated through AI technology based on user input. + + +## Embedding models + +See [installation instructions and usage example](/docs/integrations/text_embedding/clova). + +```python +from langchain_community.embeddings import ClovaEmbeddings +``` diff --git a/docs/docs/integrations/providers/cogniswitch.mdx b/docs/docs/integrations/providers/cogniswitch.mdx new file mode 100644 index 0000000000000..d8aee6a4c9d5c --- /dev/null +++ b/docs/docs/integrations/providers/cogniswitch.mdx @@ -0,0 +1,53 @@ +# CogniSwitch + +>[CogniSwitch](https://www.cogniswitch.ai/aboutus) is an API based data platform that +> enhances enterprise data by extracting entities, concepts and their relationships +> thereby converting this data into a multidimensional format and storing it in +> a database that can accommodate these enhancements. In our case the data is stored +> in a knowledge graph. This enhanced data is now ready for consumption by LLMs and +> other GenAI applications ensuring the data is consumable and context can be maintained. +> Thereby eliminating hallucinations and delivering accuracy. + +## Toolkit + +See [installation instructions and usage example](/docs/integrations/tools/cogniswitch). 
+ +```python +from langchain_community.agent_toolkits import CogniswitchToolkit +``` + +## Tools + +### CogniswitchKnowledgeRequest + +>Tool that uses the CogniSwitch service to answer questions. + +```python +from langchain_community.tools.cogniswitch.tool import CogniswitchKnowledgeRequest +``` + +### CogniswitchKnowledgeSourceFile + +>Tool that uses the CogniSwitch services to store data from file. + +```python +from langchain_community.tools.cogniswitch.tool import CogniswitchKnowledgeSourceFile +``` + +### CogniswitchKnowledgeSourceURL + +>Tool that uses the CogniSwitch services to store data from a URL. + +```python +from langchain_community.tools.cogniswitch.tool import CogniswitchKnowledgeSourceURL +``` + +### CogniswitchKnowledgeStatus + +>Tool that uses the CogniSwitch services to get the status of the document or url uploaded. + +```python +from langchain_community.tools.cogniswitch.tool import CogniswitchKnowledgeStatus +``` + + From 61228da1c4cb73515e893d56f72a372210107500 Mon Sep 17 00:00:00 2001 From: sslee <91924792+sslcandoit@users.noreply.github.com> Date: Thu, 22 Aug 2024 13:33:53 -0400 Subject: [PATCH 70/80] fix typo (#25673) --- cookbook/Multi_modal_RAG_google.ipynb | 2 +- cookbook/langgraph_self_rag.ipynb | 11 ++++++++--- cookbook/rag-locally-on-intel-cpu.ipynb | 13 +++++++++---- 3 files changed, 18 insertions(+), 8 deletions(-) diff --git a/cookbook/Multi_modal_RAG_google.ipynb b/cookbook/Multi_modal_RAG_google.ipynb index 9f4d6615b6810..35bdfff764295 100644 --- a/cookbook/Multi_modal_RAG_google.ipynb +++ b/cookbook/Multi_modal_RAG_google.ipynb @@ -445,7 +445,7 @@ "\n", "\n", "def plt_img_base64(img_base64):\n", - " \"\"\"Disply base64 encoded string as image\"\"\"\n", + " \"\"\"Display base64 encoded string as image\"\"\"\n", " # Create an HTML img tag with the base64 string as the source\n", " image_html = f''\n", " # Display the image by rendering the HTML\n", diff --git a/cookbook/langgraph_self_rag.ipynb b/cookbook/langgraph_self_rag.ipynb index c6b1e75406696..8d61a84287463 100644 --- a/cookbook/langgraph_self_rag.ipynb +++ b/cookbook/langgraph_self_rag.ipynb @@ -336,7 +336,7 @@ " # Create a prompt template with format instructions and the query\n", " prompt = PromptTemplate(\n", " template=\"\"\"You are generating questions that is well optimized for retrieval. \\n \n", - " Look at the input and try to reason about the underlying sematic intent / meaning. \\n \n", + " Look at the input and try to reason about the underlying semantic intent / meaning. \\n \n", " Here is the initial question:\n", " \\n ------- \\n\n", " {question} \n", @@ -643,7 +643,7 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 3 (ipykernel)", + "display_name": "Python 3.11.1 64-bit", "language": "python", "name": "python3" }, @@ -657,7 +657,12 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.16" + "version": "3.11.1" + }, + "vscode": { + "interpreter": { + "hash": "1a1af0ee75eeea9e2e1ee996c87e7a2b11a0bebd85af04bb136d915cefc0abce" + } } }, "nbformat": 4, diff --git a/cookbook/rag-locally-on-intel-cpu.ipynb b/cookbook/rag-locally-on-intel-cpu.ipynb index 2eb6dec9737ef..fc059114133fe 100644 --- a/cookbook/rag-locally-on-intel-cpu.ipynb +++ b/cookbook/rag-locally-on-intel-cpu.ipynb @@ -647,7 +647,7 @@ "metadata": {}, "source": [ "**Now we see the results are correct as it is mentioned in earnings release.**
\n", - "**To further automate, we will create a chain that will take input as question and retriever so that we don't need to retrieve documents seperately**" + "**To further automate, we will create a chain that will take input as question and retriever so that we don't need to retrieve documents separately**" ] }, { @@ -734,9 +734,9 @@ ], "metadata": { "kernelspec": { - "display_name": "rag-on-intel", + "display_name": "Python 3.11.1 64-bit", "language": "python", - "name": "rag-on-intel" + "name": "python3" }, "language_info": { "codemirror_mode": { @@ -748,7 +748,12 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.9" + "version": "3.11.1" + }, + "vscode": { + "interpreter": { + "hash": "1a1af0ee75eeea9e2e1ee996c87e7a2b11a0bebd85af04bb136d915cefc0abce" + } } }, "nbformat": 4, From b4fcda7657bd112503deca47ca3e41980f1fe6f5 Mon Sep 17 00:00:00 2001 From: Hasan Kumar Date: Thu, 22 Aug 2024 08:02:02 -1000 Subject: [PATCH 71/80] langchain: Fix type warnings when passing Runnable as agent to AgentExecutor (#24750) Fix for https://github.com/langchain-ai/langchain/issues/13075 --------- Co-authored-by: Chester Curme --- libs/langchain/langchain/agents/agent.py | 55 ++++++++++++------- .../langchain/agents/agent_iterator.py | 4 +- .../unit_tests/agents/test_initialize.py | 5 +- 3 files changed, 42 insertions(+), 22 deletions(-) diff --git a/libs/langchain/langchain/agents/agent.py b/libs/langchain/langchain/agents/agent.py index 16f648bea9ed9..50797228a53b8 100644 --- a/libs/langchain/langchain/agents/agent.py +++ b/libs/langchain/langchain/agents/agent.py @@ -19,6 +19,7 @@ Sequence, Tuple, Union, + cast, ) import yaml @@ -1042,12 +1043,13 @@ async def _arun( NextStepOutput = List[Union[AgentFinish, AgentAction, AgentStep]] +RunnableAgentType = Union[RunnableAgent, RunnableMultiActionAgent] class AgentExecutor(Chain): """Agent that is using tools.""" - agent: Union[BaseSingleActionAgent, BaseMultiActionAgent] + agent: Union[BaseSingleActionAgent, BaseMultiActionAgent, Runnable] """The agent to run for creating a plan and determining actions to take at each step of the execution loop.""" tools: Sequence[BaseTool] @@ -1095,7 +1097,7 @@ class AgentExecutor(Chain): @classmethod def from_agent_and_tools( cls, - agent: Union[BaseSingleActionAgent, BaseMultiActionAgent], + agent: Union[BaseSingleActionAgent, BaseMultiActionAgent, Runnable], tools: Sequence[BaseTool], callbacks: Callbacks = None, **kwargs: Any, @@ -1172,6 +1174,21 @@ def validate_runnable_agent(cls, values: Dict) -> Dict: ) return values + @property + def _action_agent(self) -> Union[BaseSingleActionAgent, BaseMultiActionAgent]: + """Type cast self.agent. + + The .agent attribute type includes Runnable, but is converted to one of + RunnableAgentType in the validate_runnable_agent root_validator. + + To support instantiating with a Runnable, here we explicitly cast the type + to reflect the changes made in the root_validator. + """ + if isinstance(self.agent, Runnable): + return cast(RunnableAgentType, self.agent) + else: + return self.agent + def save(self, file_path: Union[Path, str]) -> None: """Raise error - saving not supported for Agent Executors. @@ -1193,7 +1210,7 @@ def save_agent(self, file_path: Union[Path, str]) -> None: Args: file_path: Path to save to. 
""" - return self.agent.save(file_path) + return self._action_agent.save(file_path) def iter( self, @@ -1228,7 +1245,7 @@ def input_keys(self) -> List[str]: :meta private: """ - return self.agent.input_keys + return self._action_agent.input_keys @property def output_keys(self) -> List[str]: @@ -1237,9 +1254,9 @@ def output_keys(self) -> List[str]: :meta private: """ if self.return_intermediate_steps: - return self.agent.return_values + ["intermediate_steps"] + return self._action_agent.return_values + ["intermediate_steps"] else: - return self.agent.return_values + return self._action_agent.return_values def lookup_tool(self, name: str) -> BaseTool: """Lookup tool by name. @@ -1339,7 +1356,7 @@ def _iter_next_step( intermediate_steps = self._prepare_intermediate_steps(intermediate_steps) # Call the LLM to see what to do. - output = self.agent.plan( + output = self._action_agent.plan( intermediate_steps, callbacks=run_manager.get_child() if run_manager else None, **inputs, @@ -1372,7 +1389,7 @@ def _iter_next_step( output = AgentAction("_Exception", observation, text) if run_manager: run_manager.on_agent_action(output, color="green") - tool_run_kwargs = self.agent.tool_run_logging_kwargs() + tool_run_kwargs = self._action_agent.tool_run_logging_kwargs() observation = ExceptionTool().run( output.tool_input, verbose=self.verbose, @@ -1414,7 +1431,7 @@ def _perform_agent_action( tool = name_to_tool_map[agent_action.tool] return_direct = tool.return_direct color = color_mapping[agent_action.tool] - tool_run_kwargs = self.agent.tool_run_logging_kwargs() + tool_run_kwargs = self._action_agent.tool_run_logging_kwargs() if return_direct: tool_run_kwargs["llm_prefix"] = "" # We then call the tool on the tool input to get an observation @@ -1426,7 +1443,7 @@ def _perform_agent_action( **tool_run_kwargs, ) else: - tool_run_kwargs = self.agent.tool_run_logging_kwargs() + tool_run_kwargs = self._action_agent.tool_run_logging_kwargs() observation = InvalidTool().run( { "requested_tool_name": agent_action.tool, @@ -1476,7 +1493,7 @@ async def _aiter_next_step( intermediate_steps = self._prepare_intermediate_steps(intermediate_steps) # Call the LLM to see what to do. 
- output = await self.agent.aplan( + output = await self._action_agent.aplan( intermediate_steps, callbacks=run_manager.get_child() if run_manager else None, **inputs, @@ -1507,7 +1524,7 @@ async def _aiter_next_step( else: raise ValueError("Got unexpected type of `handle_parsing_errors`") output = AgentAction("_Exception", observation, text) - tool_run_kwargs = self.agent.tool_run_logging_kwargs() + tool_run_kwargs = self._action_agent.tool_run_logging_kwargs() observation = await ExceptionTool().arun( output.tool_input, verbose=self.verbose, @@ -1561,7 +1578,7 @@ async def _aperform_agent_action( tool = name_to_tool_map[agent_action.tool] return_direct = tool.return_direct color = color_mapping[agent_action.tool] - tool_run_kwargs = self.agent.tool_run_logging_kwargs() + tool_run_kwargs = self._action_agent.tool_run_logging_kwargs() if return_direct: tool_run_kwargs["llm_prefix"] = "" # We then call the tool on the tool input to get an observation @@ -1573,7 +1590,7 @@ async def _aperform_agent_action( **tool_run_kwargs, ) else: - tool_run_kwargs = self.agent.tool_run_logging_kwargs() + tool_run_kwargs = self._action_agent.tool_run_logging_kwargs() observation = await InvalidTool().arun( { "requested_tool_name": agent_action.tool, @@ -1628,7 +1645,7 @@ def _call( ) iterations += 1 time_elapsed = time.time() - start_time - output = self.agent.return_stopped_response( + output = self._action_agent.return_stopped_response( self.early_stopping_method, intermediate_steps, **inputs ) return self._return(output, intermediate_steps, run_manager=run_manager) @@ -1680,7 +1697,7 @@ async def _acall( iterations += 1 time_elapsed = time.time() - start_time - output = self.agent.return_stopped_response( + output = self._action_agent.return_stopped_response( self.early_stopping_method, intermediate_steps, **inputs ) return await self._areturn( @@ -1688,7 +1705,7 @@ async def _acall( ) except (TimeoutError, asyncio.TimeoutError): # stop early when interrupted by the async timeout - output = self.agent.return_stopped_response( + output = self._action_agent.return_stopped_response( self.early_stopping_method, intermediate_steps, **inputs ) return await self._areturn( @@ -1702,8 +1719,8 @@ def _get_tool_return( agent_action, observation = next_step_output name_to_tool_map = {tool.name: tool for tool in self.tools} return_value_key = "output" - if len(self.agent.return_values) > 0: - return_value_key = self.agent.return_values[0] + if len(self._action_agent.return_values) > 0: + return_value_key = self._action_agent.return_values[0] # Invalid tools won't be in the map, so we return False. if agent_action.tool in name_to_tool_map: if name_to_tool_map[agent_action.tool].return_direct: diff --git a/libs/langchain/langchain/agents/agent_iterator.py b/libs/langchain/langchain/agents/agent_iterator.py index a2c51efd652af..ddd742e1c6760 100644 --- a/libs/langchain/langchain/agents/agent_iterator.py +++ b/libs/langchain/langchain/agents/agent_iterator.py @@ -371,7 +371,7 @@ def _stop(self, run_manager: CallbackManagerForChainRun) -> AddableDict: """ logger.warning("Stopping agent prematurely due to triggering stop condition") # this manually constructs agent finish with output key - output = self.agent_executor.agent.return_stopped_response( + output = self.agent_executor._action_agent.return_stopped_response( self.agent_executor.early_stopping_method, self.intermediate_steps, **self.inputs, @@ -384,7 +384,7 @@ async def _astop(self, run_manager: AsyncCallbackManagerForChainRun) -> AddableD the stopped response. 
""" logger.warning("Stopping agent prematurely due to triggering stop condition") - output = self.agent_executor.agent.return_stopped_response( + output = self.agent_executor._action_agent.return_stopped_response( self.agent_executor.early_stopping_method, self.intermediate_steps, **self.inputs, diff --git a/libs/langchain/tests/unit_tests/agents/test_initialize.py b/libs/langchain/tests/unit_tests/agents/test_initialize.py index b83a549b53c1d..f898208a2923e 100644 --- a/libs/langchain/tests/unit_tests/agents/test_initialize.py +++ b/libs/langchain/tests/unit_tests/agents/test_initialize.py @@ -21,6 +21,9 @@ def test_initialize_agent_with_str_agent_type() -> None: fake_llm, "zero-shot-react-description", # type: ignore[arg-type] ) - assert agent_executor.agent._agent_type == AgentType.ZERO_SHOT_REACT_DESCRIPTION + assert ( + agent_executor._action_agent._agent_type + == AgentType.ZERO_SHOT_REACT_DESCRIPTION + ) assert isinstance(agent_executor.tags, list) assert "zero-shot-react-description" in agent_executor.tags From 8230ba47f3b839cb6e039289c5112422e745268f Mon Sep 17 00:00:00 2001 From: William FH <13333726+hinthornw@users.noreply.github.com> Date: Thu, 22 Aug 2024 11:14:27 -0700 Subject: [PATCH 72/80] core[patch]: Improve some error messages and add another test for checking RunnableWithMessageHistory (#25209) Also add more useful error messages. --------- Co-authored-by: Eugene Yurtsev --- libs/core/langchain_core/_api/deprecation.py | 2 +- libs/core/langchain_core/prompts/chat.py | 6 +++--- libs/core/langchain_core/runnables/history.py | 4 +++- .../runnables/test_runnable_events_v2.py | 20 +++++++++++++++++++ 4 files changed, 27 insertions(+), 5 deletions(-) diff --git a/libs/core/langchain_core/_api/deprecation.py b/libs/core/langchain_core/_api/deprecation.py index b48215d0d6ca1..602732e745680 100644 --- a/libs/core/langchain_core/_api/deprecation.py +++ b/libs/core/langchain_core/_api/deprecation.py @@ -214,7 +214,7 @@ def warn_if_direct_instance( if not _obj_type: _obj_type = "attribute" if not _name: - raise ValueError() + raise ValueError(f"Field {obj} must have a name to be deprecated.") old_doc = obj.description def finalize(wrapper: Callable[..., Any], new_doc: str) -> T: diff --git a/libs/core/langchain_core/prompts/chat.py b/libs/core/langchain_core/prompts/chat.py index a53f1bb251cd0..ecd656a7ccc9e 100644 --- a/libs/core/langchain_core/prompts/chat.py +++ b/libs/core/langchain_core/prompts/chat.py @@ -551,13 +551,13 @@ def from_template( input_variables=input_variables, template=img_template ) else: - raise ValueError() + raise ValueError(f"Invalid image template: {tmpl}") prompt.append(img_template_obj) else: - raise ValueError() + raise ValueError(f"Invalid template: {tmpl}") return cls(prompt=prompt, **kwargs) else: - raise ValueError() + raise ValueError(f"Invalid template: {template}") @classmethod def from_template_file( diff --git a/libs/core/langchain_core/runnables/history.py b/libs/core/langchain_core/runnables/history.py index e169bd9a05d1d..e3197bec9000a 100644 --- a/libs/core/langchain_core/runnables/history.py +++ b/libs/core/langchain_core/runnables/history.py @@ -433,7 +433,9 @@ def _get_input_messages( # This occurs for chat models - since we batch inputs if isinstance(input_val[0], list): if len(input_val) != 1: - raise ValueError() + raise ValueError( + f"Expected a single list of messages. Got {input_val}." 
+ ) return input_val[0] return list(input_val) else: diff --git a/libs/core/tests/unit_tests/runnables/test_runnable_events_v2.py b/libs/core/tests/unit_tests/runnables/test_runnable_events_v2.py index 6132c0efb0d90..59fc49ca5eab9 100644 --- a/libs/core/tests/unit_tests/runnables/test_runnable_events_v2.py +++ b/libs/core/tests/unit_tests/runnables/test_runnable_events_v2.py @@ -1889,6 +1889,25 @@ def get_by_session_id(session_id: str) -> BaseChatMessageHistory: input_messages_key="question", history_messages_key="history", ) + + # patch with_message_history._get_output_messages to listen for errors + # so we can raise them in this main thread + raised_errors = [] + + def collect_errors(fn): # type: ignore + nonlocal raised_errors + + def _get_output_messages(*args, **kwargs): # type: ignore + try: + return fn(*args, **kwargs) + except Exception as e: + raised_errors.append(e) + raise e + + return _get_output_messages + + old_ref = with_message_history._get_output_messages + with_message_history.__dict__["_get_output_messages"] = collect_errors(old_ref) await with_message_history.with_config( {"configurable": {"session_id": "session-123"}} ).ainvoke({"question": "hello"}) @@ -1911,6 +1930,7 @@ def get_by_session_id(session_id: str) -> BaseChatMessageHistory: AIMessage(content="world", id="ai4"), ] } + assert not raised_errors EXPECTED_EVENTS = [ From 5b9290a4492cc83c59b32a06a9d3005c694ce512 Mon Sep 17 00:00:00 2001 From: Ivan <58596654+istrebitel-1@users.noreply.github.com> Date: Thu, 22 Aug 2024 21:22:09 +0300 Subject: [PATCH 73/80] Fix UnionType type var replacement (#25566) [langchain_core] Fix UnionType type var replacement - Added types.UnionType to typing.Union mapping Type replacement cause `TypeError: 'type' object is not subscriptable` if any of union type comes as function `_py_38_safe_origin` return `types.UnionType` instead of `typing.Union` ```python >>> from types import UnionType >>> from typing import Union, get_origin >>> type_ = get_origin(str | None) >>> type_ >>> UnionType[(str, None)] Traceback (most recent call last): File "", line 1, in TypeError: 'type' object is not subscriptable >>> Union[(str, None)] typing.Optional[str] ``` --------- Co-authored-by: Chester Curme --- .../langchain_core/utils/function_calling.py | 6 ++++++ .../unit_tests/utils/test_function_calling.py | 16 ++++++++++++++++ 2 files changed, 22 insertions(+) diff --git a/libs/core/langchain_core/utils/function_calling.py b/libs/core/langchain_core/utils/function_calling.py index 28a1ab73513a5..b6c902872d49a 100644 --- a/libs/core/langchain_core/utils/function_calling.py +++ b/libs/core/langchain_core/utils/function_calling.py @@ -5,6 +5,7 @@ import collections import inspect import logging +import types import typing import uuid from typing import ( @@ -575,6 +576,10 @@ def _parse_google_docstring( def _py_38_safe_origin(origin: Type) -> Type: + origin_union_type_map: Dict[Type, Any] = ( + {types.UnionType: Union} if hasattr(types, "UnionType") else {} + ) + origin_map: Dict[Type, Any] = { dict: Dict, list: List, @@ -584,5 +589,6 @@ def _py_38_safe_origin(origin: Type) -> Type: collections.abc.Mapping: typing.Mapping, collections.abc.Sequence: typing.Sequence, collections.abc.MutableMapping: typing.MutableMapping, + **origin_union_type_map, } return cast(Type, origin_map.get(origin, origin)) diff --git a/libs/core/tests/unit_tests/utils/test_function_calling.py b/libs/core/tests/unit_tests/utils/test_function_calling.py index daa981d31434b..7c68cd24a2bf2 100644 --- 
a/libs/core/tests/unit_tests/utils/test_function_calling.py +++ b/libs/core/tests/unit_tests/utils/test_function_calling.py @@ -1,4 +1,5 @@ # mypy: disable-error-code="annotation-unchecked" +import sys from typing import ( Any, Callable, @@ -702,3 +703,18 @@ class Tool(typed_dict): with pytest.raises(TypeError): _convert_typed_dict_to_openai_function(Tool) + + +@pytest.mark.skipif( + sys.version_info < (3, 10), reason="Requires python version >= 3.10 to run." +) +def test_convert_union_type_py_39() -> None: + @tool + def magic_function(input: int | float) -> str: + """Compute a magic function.""" + pass + + result = convert_to_openai_function(magic_function) + assert result["parameters"]["properties"]["input"] == { + "anyOf": [{"type": "integer"}, {"type": "number"}] + } From 01ded5e2f974b1282619fe58aace5f445d7fa454 Mon Sep 17 00:00:00 2001 From: Eric Pinzur <2641606+epinzur@users.noreply.github.com> Date: Thu, 22 Aug 2024 20:27:16 +0200 Subject: [PATCH 74/80] community: add metadata filter to CassandraGraphVectorStore (#25663) - **Description:** - Added metadata filtering support to `langchain_community.graph_vectorstores.cassandra.CassandraGraphVectorStore` - Also fixed type conversion issues highlighted by mypy. - **Dependencies:** - `ragstack-ai-knowledge-store 0.2.0` (released July 23, 2024) --------- Co-authored-by: Chester Curme --- .../graph_vectorstores/cassandra.py | 29 ++++++++++++++++--- 1 file changed, 25 insertions(+), 4 deletions(-) diff --git a/libs/community/langchain_community/graph_vectorstores/cassandra.py b/libs/community/langchain_community/graph_vectorstores/cassandra.py index 6fb04c60a13e9..33fc5ba8f9173 100644 --- a/libs/community/langchain_community/graph_vectorstores/cassandra.py +++ b/libs/community/langchain_community/graph_vectorstores/cassandra.py @@ -120,18 +120,31 @@ def from_documents( return store def similarity_search( - self, query: str, k: int = 4, **kwargs: Any + self, + query: str, + k: int = 4, + metadata_filter: dict[str, Any] = {}, + **kwargs: Any, ) -> List[Document]: embedding_vector = self._embedding.embed_query(query) return self.similarity_search_by_vector( embedding_vector, k=k, + metadata_filter=metadata_filter, ) def similarity_search_by_vector( - self, embedding: List[float], k: int = 4, **kwargs: Any + self, + embedding: List[float], + k: int = 4, + metadata_filter: dict[str, Any] = {}, + **kwargs: Any, ) -> List[Document]: - nodes = self.store.similarity_search(embedding, k=k) + nodes = self.store.similarity_search( + embedding, + k=k, + metadata_filter=metadata_filter, + ) return list(nodes_to_documents(nodes)) def traversal_search( @@ -140,9 +153,15 @@ def traversal_search( *, k: int = 4, depth: int = 1, + metadata_filter: dict[str, Any] = {}, **kwargs: Any, ) -> Iterable[Document]: - nodes = self.store.traversal_search(query, k=k, depth=depth) + nodes = self.store.traversal_search( + query, + k=k, + depth=depth, + metadata_filter=metadata_filter, + ) return nodes_to_documents(nodes) def mmr_traversal_search( @@ -155,6 +174,7 @@ def mmr_traversal_search( adjacent_k: int = 10, lambda_mult: float = 0.5, score_threshold: float = float("-inf"), + metadata_filter: dict[str, Any] = {}, **kwargs: Any, ) -> Iterable[Document]: nodes = self.store.mmr_traversal_search( @@ -165,5 +185,6 @@ def mmr_traversal_search( adjacent_k=adjacent_k, lambda_mult=lambda_mult, score_threshold=score_threshold, + metadata_filter=metadata_filter, ) return nodes_to_documents(nodes) From e5bb4cb646e57414c6b1d16a1e0b30cfbb92be37 Mon Sep 17 00:00:00 2001 From: 
yahya-mouman <103438582+yahya-mouman@users.noreply.github.com> Date: Thu, 22 Aug 2024 20:33:26 +0200 Subject: [PATCH 75/80] lagchain-pinecone: add id to similarity documents results (#25630) - **Description:** This change adds the ID field that's required in Pinecone to the result documents of the similarity search method. - **Issue:** Lack of document metadata namely the ID field - [x] **Add tests and docs**: If you're adding a new integration, please include 1. a test for the integration, preferably unit tests that do not rely on network access, 2. an example notebook showing its use. It lives in `docs/docs/integrations` directory. - [x] **Lint and test**: Run `make format`, `make lint` and `make test` from the root of the package(s) you've modified. See contribution guidelines for more: https://python.langchain.com/docs/contributing/ Additional guidelines: - Make sure optional dependencies are imported within a function. - Please do not add dependencies to pyproject.toml files (even optional ones) unless they are required for unit tests. - Most PRs should not touch more than one package. - Changes should be backwards compatible. - If you are adding something to community, do not re-import it in langchain. If no one reviews your PR within a few days, please @-mention one of baskaryan, efriis, eyurtsev, ccurme, vbarda, hwchase17. --------- Co-authored-by: Chester Curme --- libs/partners/pinecone/langchain_pinecone/vectorstores.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/libs/partners/pinecone/langchain_pinecone/vectorstores.py b/libs/partners/pinecone/langchain_pinecone/vectorstores.py index c9adb085ece0d..05fbd9199541d 100644 --- a/libs/partners/pinecone/langchain_pinecone/vectorstores.py +++ b/libs/partners/pinecone/langchain_pinecone/vectorstores.py @@ -344,10 +344,13 @@ def similarity_search_by_vector_with_score( ) for res in results["matches"]: metadata = res["metadata"] + id = res.get("id") if self._text_key in metadata: text = metadata.pop(self._text_key) score = res["score"] - docs.append((Document(page_content=text, metadata=metadata), score)) + docs.append( + (Document(id=id, page_content=text, metadata=metadata), score) + ) else: logger.warning( f"Found document with no `{self._text_key}` key. Skipping." From fad6fc866a5a9a2347e1a18ce8c31b39e7a490d8 Mon Sep 17 00:00:00 2001 From: William FH <13333726+hinthornw@users.noreply.github.com> Date: Thu, 22 Aug 2024 11:43:44 -0700 Subject: [PATCH 76/80] Rm DeepInfra Breakpoint Comment (#25206) tbh should rm the print staement too --- libs/community/langchain_community/chat_models/deepinfra.py | 1 - 1 file changed, 1 deletion(-) diff --git a/libs/community/langchain_community/chat_models/deepinfra.py b/libs/community/langchain_community/chat_models/deepinfra.py index ee670248a6716..37fef6763b77e 100644 --- a/libs/community/langchain_community/chat_models/deepinfra.py +++ b/libs/community/langchain_community/chat_models/deepinfra.py @@ -257,7 +257,6 @@ def _completion_with_retry(**kwargs: Any) -> Any: self._handle_status(response.status_code, response.text) return response except Exception as e: - # import pdb; pdb.set_trace() print("EX", e) # noqa: T201 raise From 6fbd53bc60ae880e3186ae634228b4da9a07f703 Mon Sep 17 00:00:00 2001 From: Sanjay Parajuli <41162183+xanjay@users.noreply.github.com> Date: Thu, 22 Aug 2024 20:55:24 +0200 Subject: [PATCH 77/80] docs: Update tool_calling.ipynb (#25434) **Description:** This part of the documentation didn't explain about the `required` property of function calling. 
I added additional line as a note. --------- Co-authored-by: Chester Curme --- docs/docs/how_to/tool_calling.ipynb | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/docs/how_to/tool_calling.ipynb b/docs/docs/how_to/tool_calling.ipynb index 5e38d15fbace8..8263604c5c72d 100644 --- a/docs/docs/how_to/tool_calling.ipynb +++ b/docs/docs/how_to/tool_calling.ipynb @@ -101,7 +101,9 @@ "\n", "### Pydantic class\n", "\n", - "You can equivalently define the schemas without the accompanying functions using [Pydantic](https://docs.pydantic.dev):" + "You can equivalently define the schemas without the accompanying functions using [Pydantic](https://docs.pydantic.dev).\n", + "\n", + "Note that all fields are `required` unless provided a default value.\n" ] }, { From 8bde04079b26379b874a10fa26c954192bf2e0fc Mon Sep 17 00:00:00 2001 From: ZhangShenao <15201440436@163.com> Date: Fri, 23 Aug 2024 02:59:40 +0800 Subject: [PATCH 78/80] patch[experimental] Fix start_index in `SemanticChunker` (#24761) - Cause chunks are joined by space, so they can't be found in text, and the final `start_index` is very possibility to be -1. - The simplest way is to use the natural index of the chunk as `start_index`. --- libs/experimental/langchain_experimental/text_splitter.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/libs/experimental/langchain_experimental/text_splitter.py b/libs/experimental/langchain_experimental/text_splitter.py index be3a795b06a61..2151a0e5ce9b7 100644 --- a/libs/experimental/langchain_experimental/text_splitter.py +++ b/libs/experimental/langchain_experimental/text_splitter.py @@ -262,14 +262,14 @@ def create_documents( _metadatas = metadatas or [{}] * len(texts) documents = [] for i, text in enumerate(texts): - index = -1 + start_index = 0 for chunk in self.split_text(text): metadata = copy.deepcopy(_metadatas[i]) if self._add_start_index: - index = text.find(chunk, index + 1) - metadata["start_index"] = index + metadata["start_index"] = start_index new_doc = Document(page_content=chunk, metadata=metadata) documents.append(new_doc) + start_index += len(chunk) return documents def split_documents(self, documents: Iterable[Document]) -> List[Document]: From 0f3fe44e4452a6b68d5f9443ce8f42cefeeaf714 Mon Sep 17 00:00:00 2001 From: basirsedighi <31671555+basirsedighi@users.noreply.github.com> Date: Thu, 22 Aug 2024 21:09:43 +0200 Subject: [PATCH 79/80] =?UTF-8?q?parsed=5Fjson=20is=20expected=20to=20be?= =?UTF-8?q?=20a=20list=20of=20dictionaries,=20but=20it=20seems=20to?= =?UTF-8?q?=E2=80=A6=20(#24018)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit parsed_json is expected to be a list of dictionaries, but it seems to… be a single dictionary instead. This is at libs/experimental/langchain_experimental/graph_transformers/llm.py process process_response Thank you for contributing to LangChain! 
- [ ] **Bugfix**: "experimental: bugfix" --------- Co-authored-by: based Co-authored-by: Chester Curme --- .../langchain_experimental/graph_transformers/llm.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/libs/experimental/langchain_experimental/graph_transformers/llm.py b/libs/experimental/langchain_experimental/graph_transformers/llm.py index 6b1c60ad83e47..7f1eb9b8fb325 100644 --- a/libs/experimental/langchain_experimental/graph_transformers/llm.py +++ b/libs/experimental/langchain_experimental/graph_transformers/llm.py @@ -729,6 +729,8 @@ def process_response( if not isinstance(raw_schema, str): raw_schema = raw_schema.content parsed_json = self.json_repair.loads(raw_schema) + if isinstance(parsed_json, dict): + parsed_json = [parsed_json] for rel in parsed_json: # Nodes need to be deduplicated using a set nodes_set.add((rel["head"], rel["head_type"])) From 7d13a2f958235774ec513fcd9c7ac421e5cbf29f Mon Sep 17 00:00:00 2001 From: CastaChick Date: Fri, 23 Aug 2024 04:46:25 +0900 Subject: [PATCH 80/80] core[patch]: add option to specify the chunk separator in `merge_message_runs` (#24783) **Description:** LLM will stop generating text even in the middle of a sentence if `finish_reason` is `length` (for OpenAI) or `stop_reason` is `max_tokens` (for Anthropic). To obtain longer outputs from LLM, we should call the message generation API multiple times and merge the results into the text to circumvent the API's output token limit. The extra line breaks forced by the `merge_message_runs` function when seamlessly merging messages can be annoying, so I added the option to specify the chunk separator. **Issue:** No corresponding issues. **Dependencies:** No dependencies required. **Twitter handle:** @hanama_chem https://x.com/hanama_chem --------- Co-authored-by: Bagatur <22008038+baskaryan@users.noreply.github.com> Co-authored-by: Bagatur --- .../document_loaders/langsmith.py | 2 +- libs/core/langchain_core/messages/utils.py | 17 ++++++++----- .../tests/unit_tests/messages/test_utils.py | 24 +++++++++++++++++++ 3 files changed, 36 insertions(+), 7 deletions(-) diff --git a/libs/core/langchain_core/document_loaders/langsmith.py b/libs/core/langchain_core/document_loaders/langsmith.py index 232da98ccf7c6..9da48851d0672 100644 --- a/libs/core/langchain_core/document_loaders/langsmith.py +++ b/libs/core/langchain_core/document_loaders/langsmith.py @@ -73,7 +73,7 @@ def __init__( inline_s3_urls: Whether to inline S3 URLs. Defaults to True. offset: The offset to start from. Defaults to 0. limit: The maximum number of examples to return. - filter: A structured fileter string to apply to the examples. + filter: A structured filter string to apply to the examples. client: LangSmith Client. If not provided will be initialized from below args. client_kwargs: Keyword args to pass to LangSmith client init. Should only be specified if ``client`` isn't. diff --git a/libs/core/langchain_core/messages/utils.py b/libs/core/langchain_core/messages/utils.py index c7d4d58a149bf..6f88fcbcf79a7 100644 --- a/libs/core/langchain_core/messages/utils.py +++ b/libs/core/langchain_core/messages/utils.py @@ -443,6 +443,8 @@ def filter_messages( @_runnable_support def merge_message_runs( messages: Union[Iterable[MessageLikeRepresentation], PromptValue], + *, + chunk_separator: str = "\n", ) -> List[BaseMessage]: """Merge consecutive Messages of the same type. @@ -451,13 +453,16 @@ def merge_message_runs( Args: messages: Sequence Message-like objects to merge. 
+ chunk_separator: Specify the string to be inserted between message chunks. + Default is "\n". Returns: List of BaseMessages with consecutive runs of message types merged into single - messages. If two messages being merged both have string contents, the merged - content is a concatenation of the two strings with a new-line separator. If at - least one of the messages has a list of content blocks, the merged content is a - list of content blocks. + messages. By default, if two messages being merged both have string contents, + the merged content is a concatenation of the two strings with a new-line separator. + The separator inserted between message chunks can be controlled by specifying + any string with ``chunk_separator``. If at least one of the messages has a list of + content blocks, the merged content is a list of content blocks. Example: .. code-block:: python @@ -527,7 +532,7 @@ def merge_message_runs( and last_chunk.content and curr_chunk.content ): - last_chunk.content += "\n" + last_chunk.content += chunk_separator merged.append(_chunk_to_msg(last_chunk + curr_chunk)) return merged @@ -799,7 +804,7 @@ def list_token_counter(messages: Sequence[BaseMessage]) -> int: list_token_counter = token_counter # type: ignore[assignment] else: raise ValueError( - f"'token_counter' expected ot be a model that implements " + f"'token_counter' expected to be a model that implements " f"'get_num_tokens_from_messages()' or a function. Received object of type " f"{type(token_counter)}." ) diff --git a/libs/core/tests/unit_tests/messages/test_utils.py b/libs/core/tests/unit_tests/messages/test_utils.py index 3f25e02fb2381..56b8c0df7bed3 100644 --- a/libs/core/tests/unit_tests/messages/test_utils.py +++ b/libs/core/tests/unit_tests/messages/test_utils.py @@ -30,6 +30,30 @@ def test_merge_message_runs_str(msg_cls: Type[BaseMessage]) -> None: assert messages == messages_copy +@pytest.mark.parametrize("msg_cls", [HumanMessage, AIMessage, SystemMessage]) +def test_merge_message_runs_str_with_specified_separator( + msg_cls: Type[BaseMessage], +) -> None: + messages = [msg_cls("foo"), msg_cls("bar"), msg_cls("baz")] + messages_copy = [m.copy(deep=True) for m in messages] + expected = [msg_cls("foobarbaz")] + actual = merge_message_runs(messages, chunk_separator="") + assert actual == expected + assert messages == messages_copy + + +@pytest.mark.parametrize("msg_cls", [HumanMessage, AIMessage, SystemMessage]) +def test_merge_message_runs_str_without_separator( + msg_cls: Type[BaseMessage], +) -> None: + messages = [msg_cls("foo"), msg_cls("bar"), msg_cls("baz")] + messages_copy = [m.copy(deep=True) for m in messages] + expected = [msg_cls("foobarbaz")] + actual = merge_message_runs(messages, chunk_separator="") + assert actual == expected + assert messages == messages_copy + + def test_merge_message_runs_content() -> None: messages = [ AIMessage("foo", id="1"),