Support 3.10 (#313)
Support python 3.10 and fix unit tests
Disiok authored Oct 14, 2024
1 parent 8c3757a commit f94eb8a
Showing 11 changed files with 71 additions and 25 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/e2e_test.yml
@@ -16,7 +16,7 @@ jobs:
# You can use PyPy versions in python-version.
# For example, pypy-2.7 and pypy-3.8
matrix:
python-version: ["3.11", "3.12"]
python-version: ["3.10", "3.11", "3.12"]
steps:
- uses: actions/checkout@v3
with:
2 changes: 1 addition & 1 deletion .github/workflows/unit_test.yml
@@ -16,7 +16,7 @@ jobs:
# You can use PyPy versions in python-version.
# For example, pypy-2.7 and pypy-3.8
matrix:
python-version: ["3.11", "3.12"]
python-version: ["3.10", "3.11", "3.12"]
steps:
- uses: actions/checkout@v3
with:
8 changes: 7 additions & 1 deletion llama_deploy/apiserver/config_parser.py
@@ -1,6 +1,12 @@
from enum import Enum
from pathlib import Path
-from typing import Self, Annotated, Union
+from typing import Annotated, Union
+import sys

+if sys.version_info >= (3, 11):
+    from typing import Self
+else:
+    from typing_extensions import Self

import yaml
from pydantic import BaseModel, Field
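The version gate above is the standard pattern for using typing.Self on Python 3.10, where it is not yet in the standard library. Below is a minimal, self-contained sketch (the Config.from_name classmethod is illustrative, not taken from the repo) of how the typing_extensions fallback keeps Self annotations working on 3.10:

    import sys

    if sys.version_info >= (3, 11):
        from typing import Self
    else:
        # Backport: typing.Self was only added in Python 3.11.
        from typing_extensions import Self

    from pydantic import BaseModel


    class Config(BaseModel):
        name: str = "default"

        @classmethod
        def from_name(cls, name: str) -> Self:
            # Self resolves to the actual subclass, so subclasses of Config
            # keep a precise return type without repeating the annotation.
            return cls(name=name)


    print(Config.from_name("example"))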
6 changes: 3 additions & 3 deletions llama_deploy/cli/__init__.py
@@ -1,8 +1,8 @@
import click

-from .deploy import deploy
-from .run import run
-from .status import status
+from llama_deploy.cli.deploy import deploy
+from llama_deploy.cli.run import run
+from llama_deploy.cli.status import status


@click.group(
2 changes: 1 addition & 1 deletion llama_deploy/cli/deploy.py
@@ -2,7 +2,7 @@

import click

-from .utils import request
+from llama_deploy.cli.utils import request


@click.command()
2 changes: 1 addition & 1 deletion llama_deploy/cli/status.py
@@ -1,7 +1,7 @@
import click


-from .utils import request
+from llama_deploy.cli.utils import request


@click.command()
44 changes: 41 additions & 3 deletions poetry.lock

Some generated files are not rendered by default.

4 changes: 3 additions & 1 deletion pyproject.toml
@@ -24,7 +24,7 @@ maintainers = [
readme = "README.md"

[tool.poetry.dependencies]
python = ">=3.11,<4.0"
python = ">=3.10,<4.0"
fastapi = ">=0.109.1"
llama-index-core = "^0.11.10"
pydantic-settings = ">=2.0,<3.0"
@@ -38,6 +38,7 @@ redis = {version = "^5.0.7", optional = true}
types-aiobotocore = {version = "^2.14.0", optional = true, extras = ["sqs", "sns"]}
gitpython = "^3.1.43"
python-multipart = "^0.0.10"
+typing_extensions = "^4.0.0"

[tool.poetry.extras]
kafka = ["aiokafka", "kafka-python-ng"]
@@ -49,6 +50,7 @@ awssqs = ["aiobotocore", "types-aiobotocore"]
pytest = "^8.2.2"
pytest-asyncio = "^0.23.7"
pytest-mock = "^3.14.0"
+exceptiongroup = "^1.2.0"
ruff = "^0.4.7"
mypy = "^1.10.0"
aio-pika = "^9.4.2"
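The new exceptiongroup dev dependency is presumably needed because the built-in ExceptionGroup class only exists on Python 3.11+; on 3.10 the backport package supplies the same class (pytest and anyio rely on it there). A minimal sketch of the compatibility pattern, not taken from the repo:

    import sys

    if sys.version_info < (3, 11):
        # ExceptionGroup is a builtin from 3.11 on; on 3.10 it comes from the backport.
        from exceptiongroup import ExceptionGroup

    try:
        raise ExceptionGroup("two failures", [ValueError("a"), KeyError("b")])
    except ExceptionGroup as eg:
        # eg.exceptions holds the wrapped exceptions on both 3.10 and 3.11+.
        print([type(exc).__name__ for exc in eg.exceptions])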
4 changes: 2 additions & 2 deletions tests/cli/test_deploy.py
@@ -9,7 +9,7 @@
def test_deploy(runner: CliRunner, data_path: Path) -> None:
test_config_file = data_path / "deployment.yaml"
mocked_response = mock.MagicMock(status_code=200, json=lambda: {})
with mock.patch("llama_deploy.cli.deploy.request") as mocked_httpx:
with mock.patch("llama_deploy.cli.utils.httpx.request") as mocked_httpx:
mocked_httpx.return_value = mocked_response
result = runner.invoke(llamactl, ["-t", "5.0", "deploy", str(test_config_file)])

@@ -29,7 +29,7 @@ def test_deploy_failed(runner: CliRunner, data_path: Path) -> None:
mocked_response = mock.MagicMock(
status_code=401, json=lambda: {"detail": "Unauthorized!"}
)
with mock.patch("llama_deploy.cli.deploy.request") as mocked_httpx:
with mock.patch("llama_deploy.cli.utils.httpx.request") as mocked_httpx:
mocked_httpx.return_value = mocked_response
result = runner.invoke(llamactl, ["deploy", str(test_config_file)])
assert result.exit_code == 1
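The new patch target follows unittest.mock's "patch where it is looked up" rule: the request helper in utils calls httpx.request through the httpx module at call time, so patching llama_deploy.cli.utils.httpx.request intercepts the call no matter which CLI command triggers it. A small, self-contained sketch of the rule using the stdlib (the dotted names in the comments are illustrative):

    from unittest import mock
    import json

    # json.dumps is reached through the json module at call time, so patching
    # the attribute on the module affects every caller that writes json.dumps(...).
    with mock.patch("json.dumps") as mocked_dumps:
        mocked_dumps.return_value = "{}"
        assert json.dumps({"a": 1}) == "{}"

    # A name bound via `from json import dumps` lives in the importing module's
    # namespace instead, so the patch target would have to be that module's
    # attribute (e.g. "mypackage.cli.deploy.dumps"), not "json.dumps".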
16 changes: 8 additions & 8 deletions tests/cli/test_run.py
@@ -7,12 +7,12 @@

def test_run(runner: CliRunner) -> None:
mocked_response = mock.MagicMock(status_code=200, json=lambda: {})
with mock.patch("llama_deploy.cli.run.httpx") as mocked_httpx:
mocked_httpx.post.return_value = mocked_response
with mock.patch("httpx.post") as mocked_post:
mocked_post.return_value = mocked_response
result = runner.invoke(
llamactl, ["run", "-d", "deployment_name", "-s", "service_name"]
)
-mocked_httpx.post.assert_called_with(
+mocked_post.assert_called_with(
"http://localhost:4501/deployments/deployment_name/tasks/run",
verify=True,
json={"input": "{}", "agent_id": "service_name"},
@@ -25,17 +25,17 @@ def test_run_error(runner: CliRunner) -> None:
mocked_response = mock.MagicMock(
status_code=500, json=lambda: {"detail": "test error"}
)
with mock.patch("llama_deploy.cli.run.httpx") as mocked_httpx:
mocked_httpx.post.return_value = mocked_response
with mock.patch("httpx.post") as mocked_post:
mocked_post.return_value = mocked_response
result = runner.invoke(llamactl, ["run", "-d", "deployment_name"])
assert result.exit_code == 1
assert result.output == "Error: test error\n"


def test_run_args(runner: CliRunner) -> None:
mocked_response = mock.MagicMock(status_code=200, json=lambda: {})
with mock.patch("llama_deploy.cli.run.httpx") as mocked_httpx:
mocked_httpx.post.return_value = mocked_response
with mock.patch("httpx.post") as mocked_post:
mocked_post.return_value = mocked_response
result = runner.invoke(
llamactl,
[
@@ -50,7 +50,7 @@ mocked_httpx.post.assert_called_with(
'"second value with spaces"',
],
)
-mocked_httpx.post.assert_called_with(
+mocked_post.assert_called_with(
"http://localhost:4501/deployments/deployment_name/tasks/run",
verify=True,
json={
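Patching "httpx.post" directly works here because the run command does `import httpx` and calls httpx.post(...), so the attribute is resolved on the httpx module at call time. A minimal sketch of the updated test pattern (run_task is a stand-in, not the repo's code; httpx is assumed available since the CLI code above already calls it):

    from unittest import mock

    import httpx  # assumed installed; the CLI calls httpx.post directly


    def run_task(base_url: str, payload: dict):
        # Stand-in for the CLI's run command: calls httpx.post through the module.
        return httpx.post(f"{base_url}/deployments/demo/tasks/run", json=payload)


    with mock.patch("httpx.post") as mocked_post:
        mocked_post.return_value = mock.MagicMock(status_code=200, json=lambda: {})
        response = run_task("http://localhost:4501", {"input": "{}"})
        mocked_post.assert_called_with(
            "http://localhost:4501/deployments/demo/tasks/run", json={"input": "{}"}
        )
        assert response.status_code == 200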
6 changes: 3 additions & 3 deletions tests/cli/test_status.py
@@ -14,7 +14,7 @@ def test_status_server_down(runner: CliRunner) -> None:

def test_status_unhealthy(runner: CliRunner) -> None:
mocked_response = mock.MagicMock(status_code=500)
with mock.patch("llama_deploy.cli.status.request") as mocked_httpx:
with mock.patch("llama_deploy.cli.utils.httpx.request") as mocked_httpx:
mocked_httpx.return_value = mocked_response
result = runner.invoke(llamactl, ["status"])
assert result.exit_code == 0
@@ -23,7 +23,7 @@ def test_status(runner: CliRunner) -> None:

def test_status(runner: CliRunner) -> None:
mocked_response = mock.MagicMock(status_code=200, json=lambda: {})
with mock.patch("llama_deploy.cli.status.request") as mocked_httpx:
with mock.patch("llama_deploy.cli.utils.httpx.request") as mocked_httpx:
mocked_httpx.return_value = mocked_response
result = runner.invoke(llamactl, ["status"])
assert result.exit_code == 0
@@ -36,7 +36,7 @@
def test_status_with_deployments(runner: CliRunner) -> None:
mocked_response = mock.MagicMock(status_code=200)
mocked_response.json.return_value = {"deployments": ["foo", "bar"]}
with mock.patch("llama_deploy.cli.status.request") as mocked_httpx:
with mock.patch("llama_deploy.cli.utils.httpx.request") as mocked_httpx:
mocked_httpx.return_value = mocked_response
result = runner.invoke(llamactl, ["status"])
assert result.exit_code == 0
