_ConfigurableModel Class — langchain Architecture
Architecture documentation for the _ConfigurableModel class in base.py from the langchain codebase.
Entity Profile
Dependency Diagram
graph TD
  ConfigurableModel["_ConfigurableModel"]
  BasePy["base.py"]
  ConfigurableModel -->|defined in| BasePy
  ConfigurableModel -->|method| Init["__init__()"]
  ConfigurableModel -->|method| GetAttr["__getattr__()"]
  ConfigurableModel -->|method| Model["_model()"]
  ConfigurableModel -->|method| ModelParams["_model_params()"]
  ConfigurableModel -->|method| WithConfig["with_config()"]
  ConfigurableModel -->|method| InputType["InputType()"]
  ConfigurableModel -->|method| Invoke["invoke()"]
  ConfigurableModel -->|method| AInvoke["ainvoke()"]
  ConfigurableModel -->|method| Stream["stream()"]
  ConfigurableModel -->|method| AStream["astream()"]
  ConfigurableModel -->|method| Batch["batch()"]
  ConfigurableModel -->|method| ABatch["abatch()"]
  ConfigurableModel -->|method| BatchAsCompleted["batch_as_completed()"]
Source Code
libs/langchain/langchain_classic/chat_models/base.py lines 646–1048
class _ConfigurableModel(Runnable[LanguageModelInput, Any]):
    def __init__(
        self,
        *,
        default_config: dict | None = None,
        configurable_fields: Literal["any"] | list[str] | tuple[str, ...] = "any",
        config_prefix: str = "",
        queued_declarative_operations: Sequence[tuple[str, tuple, dict]] = (),
    ) -> None:
        self._default_config: dict = default_config or {}
        self._configurable_fields: Literal["any"] | list[str] = (
            configurable_fields
            if configurable_fields == "any"
            else list(configurable_fields)
        )
        self._config_prefix = (
            config_prefix + "_"
            if config_prefix and not config_prefix.endswith("_")
            else config_prefix
        )
        self._queued_declarative_operations: list[tuple[str, tuple, dict]] = list(
            queued_declarative_operations,
        )

    def __getattr__(self, name: str) -> Any:
        if name in _DECLARATIVE_METHODS:
            # Declarative operations that cannot be applied until after an actual
            # model object is instantiated. So instead of returning the actual
            # operation, we record the operation and its arguments in a queue.
            # This queue is then applied in order whenever we actually instantiate
            # the model (in self._model()).
            def queue(*args: Any, **kwargs: Any) -> _ConfigurableModel:
                queued_declarative_operations = list(
                    self._queued_declarative_operations,
                )
                queued_declarative_operations.append((name, args, kwargs))
                return _ConfigurableModel(
                    default_config=dict(self._default_config),
                    configurable_fields=list(self._configurable_fields)
                    if isinstance(self._configurable_fields, list)
                    else self._configurable_fields,
                    config_prefix=self._config_prefix,
                    queued_declarative_operations=queued_declarative_operations,
                )

            return queue
        if self._default_config and (model := self._model()) and hasattr(model, name):
            return getattr(model, name)
        msg = f"{name} is not a BaseChatModel attribute"
        if self._default_config:
            msg += " and is not implemented on the default model"
        msg += "."
        raise AttributeError(msg)

    def _model(self, config: RunnableConfig | None = None) -> Runnable:
        params = {**self._default_config, **self._model_params(config)}
        model = _init_chat_model_helper(**params)
        for name, args, kwargs in self._queued_declarative_operations:
            model = getattr(model, name)(*args, **kwargs)
        return model

    def _model_params(self, config: RunnableConfig | None) -> dict:
        config = ensure_config(config)
        model_params = {
            k.removeprefix(self._config_prefix): v
            for k, v in config.get("configurable", {}).items()
            if k.startswith(self._config_prefix)
        }
        if self._configurable_fields != "any":
            model_params = {
                k: v for k, v in model_params.items() if k in self._configurable_fields
            }
        return model_params

    def with_config(
        self,
        config: RunnableConfig | None = None,
        **kwargs: Any,
    ) -> _ConfigurableModel:
        """Bind config to a `Runnable`, returning a new `Runnable`."""
        config = RunnableConfig(**(config or {}), **cast("RunnableConfig", kwargs))
        # ... (remainder of with_config and of the class omitted; see lines 646–1048)
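The _model_params method keeps only the configurable keys that carry this instance's prefix, strips that prefix, and (unless configurable_fields is "any") restricts the result to the allowed fields. A minimal, self-contained sketch of that filtering; the first_ prefix and the field names are illustrative, not taken from the source:

# Illustrative rerun of the filtering logic in _model_params, assuming
# config_prefix="first_" and configurable_fields=["model", "temperature"].
config_prefix = "first_"
configurable_fields = ["model", "temperature"]

configurable = {
    "first_model": "gpt-4o-mini",       # kept: prefix matches, field allowed
    "first_temperature": 0.2,           # kept: prefix matches, field allowed
    "first_api_key": "not-a-real-key",  # dropped: not in configurable_fields
    "second_model": "other-model",      # dropped: different prefix
}

model_params = {
    k.removeprefix(config_prefix): v
    for k, v in configurable.items()
    if k.startswith(config_prefix)
}
model_params = {k: v for k, v in model_params.items() if k in configurable_fields}

assert model_params == {"model": "gpt-4o-mini", "temperature": 0.2}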
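Because of the __getattr__ branch above, declarative methods such as bind_tools() can be called before any concrete model exists: the call is queued on a new _ConfigurableModel and replayed inside _model() once the chat model is built. A hedged usage sketch, assuming langchain's public init_chat_model entry point (the usual way this class is constructed) and an illustrative model name:

from langchain.chat_models import init_chat_model
from langchain_core.tools import tool


@tool
def add(a: int, b: int) -> int:
    """Add two integers."""
    return a + b


# No model given, so init_chat_model returns a _ConfigurableModel.
configurable = init_chat_model(configurable_fields=("model",))

# bind_tools() is intercepted by __getattr__ and recorded, not executed yet.
with_tools = configurable.bind_tools([add])

# At invoke time, _model() builds the chat model from config["configurable"]
# and replays the queued bind_tools() call on it.
with_tools.invoke(
    "What is 2 + 3?",
    config={"configurable": {"model": "gpt-4o-mini"}},
)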
Frequently Asked Questions
What is the _ConfigurableModel class?
_ConfigurableModel is a Runnable wrapper defined in libs/langchain/langchain_classic/chat_models/base.py. Instead of holding a concrete chat model, it stores a default parameter dict, a config prefix, and a queue of declarative operations, then builds the underlying chat model on demand from the `configurable` section of the RunnableConfig passed at invocation time, so the model and its parameters can be chosen or overridden per call.
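A minimal usage sketch (model names are illustrative), assuming the public init_chat_model helper, which returns a _ConfigurableModel when no fixed model is specified:

from langchain.chat_models import init_chat_model

# Model choice is deferred: only defaults (here temperature) are fixed up front.
configurable_model = init_chat_model(temperature=0)

# The same wrapper can drive different chat models per call via the config.
configurable_model.invoke(
    "what's your name",
    config={"configurable": {"model": "gpt-4o-mini"}},
)
configurable_model.invoke(
    "what's your name",
    config={"configurable": {"model": "claude-3-5-sonnet-latest"}},
)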
Where is _ConfigurableModel defined?
_ConfigurableModel is defined in libs/langchain/langchain_classic/chat_models/base.py at line 646.