Skip to content

Commit

Permalink
mark serialization as broken and add get_available_models test
Browse files Browse the repository at this point in the history
  • Loading branch information
mattf committed May 14, 2024
1 parent caea7a2 commit e7bd7cd
Showing 1 changed file with 12 additions and 4 deletions.
16 changes: 12 additions & 4 deletions libs/ai-endpoints/tests/integration_tests/test_chat_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
from langchain_core.load.load import loads
from langchain_core.messages import AIMessage, BaseMessage, HumanMessage, SystemMessage

from langchain_nvidia_ai_endpoints._common import Model
from langchain_nvidia_ai_endpoints.chat_models import ChatNVIDIA

#
Expand Down Expand Up @@ -320,10 +321,10 @@ def test_ai_endpoints_invoke_top_p_positive(chat_model: str, mode: dict) -> None
assert result0.content != result1.content


def test_serialize_chatnvidia() -> None:
model = loads(
dumps(ChatNVIDIA()), valid_namespaces=["langchain_nvidia_ai_endpoints"]
)
@pytest.mark.skip("serialization support is broken, needs attention")
def test_serialize_chatnvidia(chat_model: str, mode: dict) -> None:
    # Round-trip the configured client through dumps/loads and confirm the
    # revived instance can still answer a prompt. Currently skipped because
    # serialization support is known-broken.
    original = ChatNVIDIA(model=chat_model).mode(**mode)
    serialized = dumps(original)
    revived = loads(serialized, valid_namespaces=["langchain_nvidia_ai_endpoints"])
    response = revived.invoke("What is there if there is nothing?")
    assert isinstance(response.content, str)

Expand All @@ -335,3 +336,10 @@ def test_chat_available_models(mode: dict) -> None:
# we don't have type information for local nim endpoints
if mode.get("mode", None) != "nim":
assert all(model.model_type is not None for model in models)


def test_chat_get_available_models(mode: dict) -> None:
    # get_available_models should yield at least one entry, and every entry
    # should be a Model instance.
    available = ChatNVIDIA.get_available_models(**mode)
    assert len(available) > 0
    assert all(isinstance(entry, Model) for entry in available)

0 comments on commit e7bd7cd

Please sign in to comment.