_ConfigurableModel Class — langchain Architecture

Architecture documentation for the _ConfigurableModel class in base.py from the langchain codebase.

Dependency Diagram

graph TD
  ConfigurableModel["_ConfigurableModel"]
  base_py["base.py"]
  ConfigurableModel -->|defined in| base_py
  init["__init__()"]
  ConfigurableModel -->|method| init
  getattr["__getattr__()"]
  ConfigurableModel -->|method| getattr
  model["_model()"]
  ConfigurableModel -->|method| model
  model_params["_model_params()"]
  ConfigurableModel -->|method| model_params
  with_config["with_config()"]
  ConfigurableModel -->|method| with_config
  InputType["InputType()"]
  ConfigurableModel -->|method| InputType
  invoke["invoke()"]
  ConfigurableModel -->|method| invoke
  ainvoke["ainvoke()"]
  ConfigurableModel -->|method| ainvoke
  stream["stream()"]
  ConfigurableModel -->|method| stream
  astream["astream()"]
  ConfigurableModel -->|method| astream
  batch["batch()"]
  ConfigurableModel -->|method| batch
  abatch["abatch()"]
  ConfigurableModel -->|method| abatch
  batch_as_completed["batch_as_completed()"]
  ConfigurableModel -->|method| batch_as_completed

Source Code

libs/langchain_v1/langchain/chat_models/base.py lines 596–994

class _ConfigurableModel(Runnable[LanguageModelInput, Any]):
    def __init__(
        self,
        *,
        default_config: dict[str, Any] | None = None,
        configurable_fields: Literal["any"] | list[str] | tuple[str, ...] = "any",
        config_prefix: str = "",
        queued_declarative_operations: Sequence[tuple[str, tuple[Any, ...], dict[str, Any]]] = (),
    ) -> None:
        self._default_config: dict[str, Any] = default_config or {}
        self._configurable_fields: Literal["any"] | list[str] = (
            "any" if configurable_fields == "any" else list(configurable_fields)
        )
        self._config_prefix = (
            config_prefix + "_"
            if config_prefix and not config_prefix.endswith("_")
            else config_prefix
        )
        self._queued_declarative_operations: list[tuple[str, tuple[Any, ...], dict[str, Any]]] = (
            list(
                queued_declarative_operations,
            )
        )

    def __getattr__(self, name: str) -> Any:
        if name in _DECLARATIVE_METHODS:
            # Declarative operations that cannot be applied until after an actual model
            # object is instantiated. So instead of returning the actual operation,
            # we record the operation and its arguments in a queue. This queue is
            # then applied in order whenever we actually instantiate the model (in
            # self._model()).
            def queue(*args: Any, **kwargs: Any) -> _ConfigurableModel:
                queued_declarative_operations = list(
                    self._queued_declarative_operations,
                )
                queued_declarative_operations.append((name, args, kwargs))
                return _ConfigurableModel(
                    default_config=dict(self._default_config),
                    configurable_fields=list(self._configurable_fields)
                    if isinstance(self._configurable_fields, list)
                    else self._configurable_fields,
                    config_prefix=self._config_prefix,
                    queued_declarative_operations=queued_declarative_operations,
                )

            return queue
        if self._default_config and (model := self._model()) and hasattr(model, name):
            return getattr(model, name)
        msg = f"{name} is not a BaseChatModel attribute"
        if self._default_config:
            msg += " and is not implemented on the default model"
        msg += "."
        raise AttributeError(msg)

    def _model(self, config: RunnableConfig | None = None) -> Runnable[Any, Any]:
        params = {**self._default_config, **self._model_params(config)}
        model = _init_chat_model_helper(**params)
        for name, args, kwargs in self._queued_declarative_operations:
            model = getattr(model, name)(*args, **kwargs)
        return model

    def _model_params(self, config: RunnableConfig | None) -> dict[str, Any]:
        config = ensure_config(config)
        model_params = {
            _remove_prefix(k, self._config_prefix): v
            for k, v in config.get("configurable", {}).items()
            if k.startswith(self._config_prefix)
        }
        if self._configurable_fields != "any":
            model_params = {k: v for k, v in model_params.items() if k in self._configurable_fields}
        return model_params

    def with_config(
        self,
        config: RunnableConfig | None = None,
        **kwargs: Any,
    ) -> _ConfigurableModel:
        config = RunnableConfig(**(config or {}), **cast("RunnableConfig", kwargs))
        # Ensure config is not None after creation
        config = ensure_config(config)
        model_params = self._model_params(config)
        # ... excerpt truncated; the remainder of with_config() and the other
        # methods shown in the dependency diagram continue through line 994.
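
To make the excerpt concrete, here is a hedged usage sketch rather than the library's documented example. It assumes that init_chat_model (the public factory in the same base.py, which ultimately calls _init_chat_model_helper) returns a _ConfigurableModel when no fixed model is given; the "openai:gpt-4o-mini" model string and the "chat" prefix are illustrative only, and provider credentials are assumed to be set in the environment.

from langchain.chat_models import init_chat_model

# No concrete chat model is built here: "model" and "temperature" remain
# configurable, and their runtime keys are namespaced with the "chat_" prefix
# (the trailing underscore is appended by __init__ above).
configurable_llm = init_chat_model(
    configurable_fields=("model", "temperature"),
    config_prefix="chat",
)

# At call time, _model_params() keeps the "configurable" entries that start
# with "chat_", strips that prefix, and _model() merges the result into the
# default config before instantiating the real model and delegating invoke().
response = configurable_llm.invoke(
    "What is a Runnable?",
    config={
        "configurable": {
            "chat_model": "openai:gpt-4o-mini",  # illustrative provider:model string
            "chat_temperature": 0.0,
        }
    },
)
print(response.content)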

Frequently Asked Questions

What is the _ConfigurableModel class?
_ConfigurableModel is a Runnable wrapper (Runnable[LanguageModelInput, Any]) defined in libs/langchain_v1/langchain/chat_models/base.py. It defers building a concrete chat model until a RunnableConfig is available: _model_params() extracts the (optionally prefixed) keys from config["configurable"], _model() merges them with the default config, instantiates the model via _init_chat_model_helper, and replays any declarative operations that __getattr__ queued before a real model existed.
Where is _ConfigurableModel defined?
_ConfigurableModel is defined in libs/langchain_v1/langchain/chat_models/base.py at line 596.
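
The __getattr__ queue also explains how declarative operations behave before any model exists. Below is a hedged sketch, assuming bind_tools is among the _DECLARATIVE_METHODS referenced in __getattr__, that init_chat_model is the factory producing the _ConfigurableModel, and that the configured provider supports tool calling; the tool and model string are illustrative only.

from langchain.chat_models import init_chat_model
from langchain_core.tools import tool


@tool
def get_weather(city: str) -> str:
    """Return a canned weather report for a city."""
    return f"It is sunny in {city}."


configurable_llm = init_chat_model(configurable_fields=("model",))

# __getattr__ intercepts bind_tools: no model is instantiated; the call is
# recorded in _queued_declarative_operations of a new _ConfigurableModel.
llm_with_tools = configurable_llm.bind_tools([get_weather])

# Only now does _model() run: it builds the chat model from the resolved
# config and replays the queued bind_tools call on that concrete instance.
result = llm_with_tools.invoke(
    "What's the weather in Paris?",
    config={"configurable": {"model": "openai:gpt-4o-mini"}},
)
print(result.tool_calls)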
