From 1e5db5664f6d5e492d23191bb4639efe72fb1af9 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Tue, 1 Aug 2023 06:19:44 +0000
Subject: [PATCH 1/3] [pre-commit.ci] pre-commit autoupdate
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

updates:
- [github.com/PyCQA/flake8: 6.0.0 → 6.1.0](https://github.com/PyCQA/flake8/compare/6.0.0...6.1.0)
---
 .pre-commit-config.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index ef73da5..52c432b 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -26,7 +26,7 @@ repos:
     hooks:
       - id: isort
   - repo: https://github.com/PyCQA/flake8
-    rev: 6.0.0
+    rev: 6.1.0
     hooks:
       - id: flake8
         additional_dependencies:

From 2ce1123912de43aed0299f999937c79cfe296b69 Mon Sep 17 00:00:00 2001
From: Casper da Costa-Luis
Date: Tue, 1 Aug 2023 14:29:15 +0100
Subject: [PATCH 2/3] tests: faster flake8

---
 .pre-commit-config.yaml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 52c432b..615679d 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -29,6 +29,7 @@ repos:
     rev: 6.1.0
     hooks:
       - id: flake8
+        args: [-j8]
         additional_dependencies:
           - flake8-broken-line
           - flake8-bugbear

From e30ffb977c614425f4ebb21c241ec6a3f8766bce Mon Sep 17 00:00:00 2001
From: Casper da Costa-Luis
Date: Tue, 1 Aug 2023 14:29:36 +0100
Subject: [PATCH 3/3] tests: fix flake8

---
 ebd-all-minilm/routes.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ebd-all-minilm/routes.py b/ebd-all-minilm/routes.py
index 31dbc05..90b8884 100644
--- a/ebd-all-minilm/routes.py
+++ b/ebd-all-minilm/routes.py
@@ -57,7 +57,7 @@ async def embeddings(body: EmbeddingsInput):
     "/engines/text-embedding-ada-002/embeddings", response_model=EmbeddingsResponse
 )
 async def embeddings_openai(body: EmbeddingsInput):
-    if len(body.input) > 0 and type(body.input[0]) == list:
+    if len(body.input) > 0 and isinstance(body.input[0], list):
         encoding = tiktoken.model.encoding_for_model("text-embedding-ada-002")
         texts = encoding.decode_batch(body.input)
     else:
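
Note on [PATCH 3/3]: the change appears needed because flake8 6.1.0 (via its updated pycodestyle) flags direct type comparisons such as `type(body.input[0]) == list` under E721; `isinstance()` is the idiomatic replacement and also accepts list subclasses. Below is a minimal standalone sketch of the same branching logic, not the project's actual route handler: the helper name `decode_if_tokenized` is hypothetical, and it assumes the `tiktoken` package is installed.

import tiktoken


def decode_if_tokenized(batch):
    """Return plain-text inputs, decoding OpenAI-style token-ID batches when given."""
    if len(batch) > 0 and isinstance(batch[0], list):
        # Elements are lists of token IDs, so decode them back into strings
        # using the tokenizer associated with the embeddings model.
        encoding = tiktoken.encoding_for_model("text-embedding-ada-002")
        return encoding.decode_batch(batch)
    # Elements are already strings; pass them through unchanged.
    return batch

In this sketch, a request whose input is a list of token-ID lists is decoded back to text before embedding, while a list of plain strings passes through unchanged, mirroring the if/else in ebd-all-minilm/routes.py.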