Skip to content

Commit

Permalink
core: deprecate predict/predict_messages/apredict/apredict_messages in favor of invoke/ainvoke
Browse files Browse the repository at this point in the history
  • Loading branch information
baskaryan committed Jan 3, 2024
1 parent c87f870 commit d19a665
Show file tree
Hide file tree
Showing 3 changed files with 9 additions and 11 deletions.
11 changes: 0 additions & 11 deletions libs/core/langchain_core/language_models/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,17 +61,6 @@ class BaseLanguageModel(
"""Abstract base class for interfacing with language models.
All language model wrappers inherit from BaseLanguageModel.
Exposes three main methods:
- generate_prompt: generate language model outputs for a sequence of prompt
values. A prompt value is a model input that can be converted to any language
model input format (string or messages).
- predict: pass in a single string to a language model and return a string
prediction.
- predict_messages: pass in a sequence of BaseMessages (corresponding to a single
model call) to a language model and return a BaseMessage prediction.
Each of these has an equivalent asynchronous method.
"""

@property
Expand Down
4 changes: 4 additions & 0 deletions libs/core/langchain_core/language_models/chat_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -717,6 +717,7 @@ def call_as_llm(
) -> str:
return self.predict(message, stop=stop, **kwargs)

@deprecated("0.1.0", alternative="invoke", removal="0.2.0")
def predict(
self, text: str, *, stop: Optional[Sequence[str]] = None, **kwargs: Any
) -> str:
Expand All @@ -730,6 +731,7 @@ def predict(
else:
raise ValueError("Cannot use predict when output is not a string.")

@deprecated("0.1.0", alternative="invoke", removal="0.2.0")
def predict_messages(
self,
messages: List[BaseMessage],
Expand All @@ -743,6 +745,7 @@ def predict_messages(
_stop = list(stop)
return self(messages, stop=_stop, **kwargs)

@deprecated("0.1.0", alternative="ainvoke", removal="0.2.0")
async def apredict(
self, text: str, *, stop: Optional[Sequence[str]] = None, **kwargs: Any
) -> str:
Expand All @@ -758,6 +761,7 @@ async def apredict(
else:
raise ValueError("Cannot use predict when output is not a string.")

@deprecated("0.1.0", alternative="ainvoke", removal="0.2.0")
async def apredict_messages(
self,
messages: List[BaseMessage],
Expand Down
5 changes: 5 additions & 0 deletions libs/core/langchain_core/language_models/llms.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@
wait_exponential,
)

from langchain_core._api import deprecated
from langchain_core.callbacks import (
AsyncCallbackManager,
AsyncCallbackManagerForLLMRun,
Expand Down Expand Up @@ -976,6 +977,7 @@ async def _call_async(
)
return result.generations[0][0].text

@deprecated("0.1.0", alternative="invoke", removal="0.2.0")
def predict(
self, text: str, *, stop: Optional[Sequence[str]] = None, **kwargs: Any
) -> str:
Expand All @@ -985,6 +987,7 @@ def predict(
_stop = list(stop)
return self(text, stop=_stop, **kwargs)

@deprecated("0.1.0", alternative="invoke", removal="0.2.0")
def predict_messages(
self,
messages: List[BaseMessage],
Expand All @@ -1000,6 +1003,7 @@ def predict_messages(
content = self(text, stop=_stop, **kwargs)
return AIMessage(content=content)

@deprecated("0.1.0", alternative="ainvoke", removal="0.2.0")
async def apredict(
self, text: str, *, stop: Optional[Sequence[str]] = None, **kwargs: Any
) -> str:
Expand All @@ -1009,6 +1013,7 @@ async def apredict(
_stop = list(stop)
return await self._call_async(text, stop=_stop, **kwargs)

@deprecated("0.1.0", alternative="ainvoke", removal="0.2.0")
async def apredict_messages(
self,
messages: List[BaseMessage],
Expand Down

0 comments on commit d19a665

Please sign in to comment.