BaseLLM Class — langchain Architecture
Architecture documentation for the BaseLLM class in llms.py from the langchain codebase.
Entity Profile
BaseLLM is an abstract class defined in libs/core/langchain_core/language_models/llms.py (lines 292–1398). It extends BaseLanguageModel[str] and ABC, and defines the prompt-in, string-out interface that concrete LLM integrations implement.
Dependency Diagram
graph TD
    BaseLLM["BaseLLM"]
    BaseLanguageModel["BaseLanguageModel"]
    PromptValue["PromptValue"]
    llmsPy["llms.py"]
    BaseLLM -->|extends| BaseLanguageModel
    BaseLLM -->|defined in| llmsPy
    BaseLLM -->|converts input to| PromptValue
    BaseLLM -->|method| serialized["_serialized()"]
    BaseLLM -->|method| outputType["OutputType()"]
    BaseLLM -->|method| convertInput["_convert_input()"]
    BaseLLM -->|method| getLsParams["_get_ls_params()"]
    BaseLLM -->|method| invoke["invoke()"]
    BaseLLM -->|method| ainvoke["ainvoke()"]
    BaseLLM -->|method| batch["batch()"]
    BaseLLM -->|method| abatch["abatch()"]
    BaseLLM -->|method| stream["stream()"]
    BaseLLM -->|method| astream["astream()"]
    BaseLLM -->|method| generate["_generate()"]
    BaseLLM -->|method| agenerate["_agenerate()"]
Source Code
libs/core/langchain_core/language_models/llms.py lines 292–1398
class BaseLLM(BaseLanguageModel[str], ABC):
    """Base LLM abstract interface.

    It should take in a prompt and return a string.
    """

    model_config = ConfigDict(
        arbitrary_types_allowed=True,
    )

    @functools.cached_property
    def _serialized(self) -> dict[str, Any]:
        # self is always a Serializable object in this case, thus the result is
        # guaranteed to be a dict since dumps uses the default callback, which uses
        # obj.to_json which always returns TypedDict subclasses
        return cast("dict[str, Any]", dumpd(self))

    # --- Runnable methods ---

    @property
    @override
    def OutputType(self) -> type[str]:
        """Get the output type for this `Runnable`."""
        return str

    def _convert_input(self, model_input: LanguageModelInput) -> PromptValue:
        if isinstance(model_input, PromptValue):
            return model_input
        if isinstance(model_input, str):
            return StringPromptValue(text=model_input)
        if isinstance(model_input, Sequence):
            return ChatPromptValue(messages=convert_to_messages(model_input))
        msg = (
            f"Invalid input type {type(model_input)}. "
            "Must be a PromptValue, str, or list of BaseMessages."
        )
        raise ValueError(msg)

    def _get_ls_params(
        self,
        stop: list[str] | None = None,
        **kwargs: Any,
    ) -> LangSmithParams:
        """Get standard params for tracing."""
        # get default provider from class name
        default_provider = self.__class__.__name__
        default_provider = default_provider.removesuffix("LLM")
        default_provider = default_provider.lower()

        ls_params = LangSmithParams(ls_provider=default_provider, ls_model_type="llm")
        if stop:
            ls_params["ls_stop"] = stop

        # model
        if "model" in kwargs and isinstance(kwargs["model"], str):
            ls_params["ls_model_name"] = kwargs["model"]
        elif hasattr(self, "model") and isinstance(self.model, str):
            ls_params["ls_model_name"] = self.model
        elif hasattr(self, "model_name") and isinstance(self.model_name, str):
            ls_params["ls_model_name"] = self.model_name

        # temperature
        if "temperature" in kwargs and isinstance(kwargs["temperature"], float):
            ls_params["ls_temperature"] = kwargs["temperature"]
        elif hasattr(self, "temperature") and isinstance(self.temperature, float):
            ls_params["ls_temperature"] = self.temperature

        # max_tokens
        if "max_tokens" in kwargs and isinstance(kwargs["max_tokens"], int):
            ls_params["ls_max_tokens"] = kwargs["max_tokens"]
        elif hasattr(self, "max_tokens") and isinstance(self.max_tokens, int):
            ls_params["ls_max_tokens"] = self.max_tokens

        return ls_params
    @override
    def invoke(
        self,
        input: LanguageModelInput,
        config: RunnableConfig | None = None,
        *,
        stop: list[str] | None = None,
        **kwargs: Any,
    ) -> str:
        ...  # excerpt truncated; the class body continues through line 1398
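In current langchain_core, a concrete subclass typically only needs to implement _generate() and the _llm_type identifying property; the Runnable methods shown above are derived from them. Below is a minimal sketch of a toy subclass (the EchoLLM name and its echo behavior are invented for illustration), assuming standard langchain_core imports:

    from typing import Any, Optional

    from langchain_core.callbacks import CallbackManagerForLLMRun
    from langchain_core.language_models.llms import BaseLLM
    from langchain_core.outputs import Generation, LLMResult

    class EchoLLM(BaseLLM):
        """Toy LLM that returns each prompt unchanged (illustration only)."""

        @property
        def _llm_type(self) -> str:
            # identifier used for callbacks and tracing
            return "echo"

        def _generate(
            self,
            prompts: list[str],
            stop: Optional[list[str]] = None,
            run_manager: Optional[CallbackManagerForLLMRun] = None,
            **kwargs: Any,
        ) -> LLMResult:
            # one list of Generation objects per input prompt
            return LLMResult(generations=[[Generation(text=p)] for p in prompts])

    llm = EchoLLM()
    print(llm.invoke("hello"))  # -> "hello"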
Frequently Asked Questions
What is the BaseLLM class?
BaseLLM is the abstract base class for text-completion LLMs in the langchain codebase, defined in libs/core/langchain_core/language_models/llms.py. It takes in a prompt and returns a string, and supplies the standard Runnable surface (invoke, ainvoke, batch, abatch, stream, astream); concrete subclasses implement _generate() and, optionally, _agenerate().
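As _convert_input() in the excerpt above shows, invoke() accepts a PromptValue, a plain string, or a sequence of messages. A minimal sketch of all three input shapes, reusing the illustrative EchoLLM sketched after the Source Code section:

    from langchain_core.messages import HumanMessage
    from langchain_core.prompts import PromptTemplate

    llm = EchoLLM()  # illustrative subclass from the sketch above, not part of langchain

    # plain string -> wrapped in StringPromptValue
    llm.invoke("Tell me a joke")

    # sequence of messages -> wrapped in ChatPromptValue
    llm.invoke([HumanMessage(content="Tell me a joke")])

    # PromptValue -> passed through unchanged
    prompt_value = PromptTemplate.from_template("Tell me a {thing}").format_prompt(thing="joke")
    llm.invoke(prompt_value)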
Where is BaseLLM defined?
BaseLLM is defined in libs/core/langchain_core/language_models/llms.py at line 292.
What does BaseLLM extend?
BaseLLM extends BaseLanguageModel[str] (and ABC), as the class declaration in the excerpt shows. PromptValue is not a base class; it is the type that the _convert_input() helper normalizes all inputs into.
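The batch and streaming entry points listed in the dependency diagram come with the base class. A short sketch, again using the illustrative EchoLLM; note that when a subclass does not override _stream(), stream() falls back to yielding the full invoke() result as a single chunk:

    llm = EchoLLM()  # illustrative subclass, not part of langchain

    llm.batch(["first prompt", "second prompt"])  # -> ["first prompt", "second prompt"]

    # EchoLLM does not override _stream(), so the whole result arrives as one chunk
    for chunk in llm.stream("hello"):
        print(chunk)  # -> "hello"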