LLMConfig

autogen.llm_config.LLMConfig #

LLMConfig(**kwargs)
Source code in autogen/llm_config.py
def __init__(self, **kwargs: Any) -> None:
    outside_properties = list((self._get_base_model_class()).model_json_schema()["properties"].keys())
    outside_properties.remove("config_list")

    if "config_list" in kwargs and isinstance(kwargs["config_list"], dict):
        kwargs["config_list"] = [kwargs["config_list"]]

    modified_kwargs = (
        kwargs
        if "config_list" in kwargs
        else {
            **{
                "config_list": [
                    {k: v for k, v in kwargs.items() if k not in outside_properties},
                ]
            },
            **{k: v for k, v in kwargs.items() if k in outside_properties},
        }
    )

    modified_kwargs["config_list"] = [
        _add_default_api_type(v) if isinstance(v, dict) else v for v in modified_kwargs["config_list"]
    ]
    for x in ["max_tokens", "top_p"]:
        if x in modified_kwargs:
            modified_kwargs["config_list"] = [{**v, x: modified_kwargs[x]} for v in modified_kwargs["config_list"]]
            modified_kwargs.pop(x)

    self._model = self._get_base_model_class()(**modified_kwargs)
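
The constructor accepts either an explicit config_list (a bare dict is promoted to a one-element list) or flat keyword arguments, which are wrapped into a single-entry config_list; top-level max_tokens and top_p are pushed down into every entry. A minimal usage sketch (model names and api_type values are placeholders):

from autogen.llm_config import LLMConfig

# Flat kwargs: wrapped into config_list=[{...}] internally
cfg = LLMConfig(model="gpt-4o-mini", api_type="openai")

# Equivalent explicit form; a bare dict is promoted to a one-element list
cfg = LLMConfig(config_list={"model": "gpt-4o-mini", "api_type": "openai"})

# Top-level max_tokens/top_p are copied into each config_list entry
cfg = LLMConfig(config_list=[{"model": "gpt-4o-mini", "api_type": "openai"}], max_tokens=256)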

get_current_llm_config classmethod #

get_current_llm_config(llm_config=None)
Source code in autogen/llm_config.py
@classmethod
def get_current_llm_config(cls, llm_config: "Optional[LLMConfig]" = None) -> "Optional[LLMConfig]":
    if llm_config is not None:
        return llm_config
    try:
        return (LLMConfig._current_llm_config.get()).copy()
    except LookupError:
        return None
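
An explicit argument short-circuits the lookup; otherwise the context variable is consulted and a copy of the active config is returned, or None when nothing is set. A sketch:

from autogen.llm_config import LLMConfig

explicit = LLMConfig(model="gpt-4o-mini", api_type="openai")

# An explicit argument is returned unchanged
assert LLMConfig.get_current_llm_config(explicit) is explicit

# With no argument and no active config, the LookupError is swallowed
print(LLMConfig.get_current_llm_config())  # None (assuming no config is active)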

from_json classmethod #

from_json(*, env=None, path=None, file_location=None, **kwargs)
Source code in autogen/llm_config.py
@classmethod
def from_json(
    cls,
    *,
    env: Optional[str] = None,
    path: Optional[Union[str, Path]] = None,
    file_location: Optional[str] = None,
    **kwargs: Any,
) -> "LLMConfig":
    from .oai.openai_utils import config_list_from_json

    if env is None and path is None:
        raise ValueError("Either 'env' or 'path' must be provided")
    if env is not None and path is not None:
        raise ValueError("Only one of 'env' or 'path' can be provided")

    config_list = config_list_from_json(
        env_or_file=env if env is not None else str(path), file_location=file_location
    )
    return LLMConfig(config_list=config_list, **kwargs)
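
Exactly one of env or path must be given; the loaded entries become the config_list, and any extra keyword arguments are forwarded to the constructor. A sketch (the file name and environment variable are placeholders, and temperature is assumed to be an accepted top-level field):

from autogen.llm_config import LLMConfig

# Load from a JSON file containing a list of config dicts
cfg = LLMConfig.from_json(path="OAI_CONFIG_LIST.json")

# Or from an environment variable; extra kwargs go to the constructor
cfg = LLMConfig.from_json(env="OAI_CONFIG_LIST", temperature=0.7)

# Passing both (or neither) of env/path raises ValueError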

where #

where(*, exclude=False, **kwargs)
Source code in autogen/llm_config.py
def where(self, *, exclude: bool = False, **kwargs: Any) -> "LLMConfig":
    from .oai.openai_utils import filter_config

    filtered_config_list = filter_config(config_list=self.config_list, filter_dict=kwargs, exclude=exclude)
    if len(filtered_config_list) == 0:
        raise ValueError(f"No config found that satisfies the filter criteria: {kwargs}")

    return LLMConfig(config_list=filtered_config_list)
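
Entries whose fields match the criteria are kept (or dropped when exclude=True), and a ValueError is raised if nothing survives. A sketch, with filter values given as lists of acceptable entries:

from autogen.llm_config import LLMConfig

cfg = LLMConfig(config_list=[
    {"model": "gpt-4o", "api_type": "openai"},
    {"model": "gpt-4o-mini", "api_type": "openai"},
])

# Keep only the matching entries
mini = cfg.where(model=["gpt-4o-mini"])

# Invert the filter: drop the matching entries instead
rest = cfg.where(exclude=True, model=["gpt-4o-mini"])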

model_dump #

model_dump(*args, exclude_none=True, **kwargs)
Source code in autogen/llm_config.py
def model_dump(self, *args: Any, exclude_none: bool = True, **kwargs: Any) -> dict[str, Any]:
    d = self._model.model_dump(*args, exclude_none=exclude_none, **kwargs)
    return {k: v for k, v in d.items() if not (isinstance(v, list) and len(v) == 0)}
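
On top of the underlying pydantic dump, keys whose value is an empty list are stripped, and exclude_none defaults to True so unset optional fields disappear as well:

cfg = LLMConfig(model="gpt-4o-mini", api_type="openai")
d = cfg.model_dump()
# e.g. {"config_list": [{"api_type": "openai", "model": "gpt-4o-mini"}]}
# None-valued and empty-list fields are omitted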

model_dump_json #

model_dump_json(*args, exclude_none=True, **kwargs)
Source code in autogen/llm_config.py
def model_dump_json(self, *args: Any, exclude_none: bool = True, **kwargs: Any) -> str:
    # Build the dict via model_dump() so empty-list fields are filtered out of the JSON too
    d = self.model_dump(*args, exclude_none=exclude_none, **kwargs)
    return json.dumps(d)
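
The JSON string is built from model_dump() rather than the raw pydantic serializer, so the same empty-list filtering applies (continuing the example above):

print(cfg.model_dump_json())
# e.g. '{"config_list": [{"api_type": "openai", "model": "gpt-4o-mini"}]}'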

model_validate #

model_validate(*args, **kwargs)
Source code in autogen/llm_config.py
def model_validate(self, *args: Any, **kwargs: Any) -> Any:
    return self._model.model_validate(*args, **kwargs)

model_validate_json #

model_validate_json(*args, **kwargs)
Source code in autogen/llm_config.py
@functools.wraps(BaseModel.model_validate_json)
def model_validate_json(self, *args: Any, **kwargs: Any) -> Any:
    return self._model.model_validate_json(*args, **kwargs)

model_validate_strings #

model_validate_strings(*args, **kwargs)
Source code in autogen/llm_config.py
@functools.wraps(BaseModel.model_validate_strings)
def model_validate_strings(self, *args: Any, **kwargs: Any) -> Any:
    return self._model.model_validate_strings(*args, **kwargs)
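
All three validators delegate to the wrapped pydantic model, so they accept the same inputs as their BaseModel counterparts; note that they are instance methods here and return the validated inner model rather than an LLMConfig. A sketch, assuming the inner model's schema matches the constructor's fields:

cfg = LLMConfig(model="gpt-4o-mini", api_type="openai")

# Returns the validated inner pydantic model, not an LLMConfig
inner = cfg.model_validate({"config_list": [{"model": "gpt-4o-mini", "api_type": "openai"}]})
inner = cfg.model_validate_json('{"config_list": [{"model": "gpt-4o-mini", "api_type": "openai"}]}')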

get #

get(key, default=None)
Source code in autogen/llm_config.py
def get(self, key: str, default: Optional[Any] = None) -> Any:
    val = getattr(self._model, key, default)
    return val
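
get mirrors dict.get for attributes of the wrapped model:

cfg = LLMConfig(model="gpt-4o-mini", api_type="openai")
cfg.get("config_list")          # the stored config_list
cfg.get("missing", "fallback")  # "fallback" when the attribute is absent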

copy #

copy()
Source code in autogen/llm_config.py
def copy(self) -> "LLMConfig":
    return self.__copy__()

deepcopy #

deepcopy(memo=None)
Source code in autogen/llm_config.py
def deepcopy(self, memo: Optional[dict[int, Any]] = None) -> "LLMConfig":
    return self.__deepcopy__(memo)
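
Both delegate to the standard copy protocol (__copy__ / __deepcopy__); deepcopy also accepts the usual memo dict (continuing the example above):

shallow = cfg.copy()
deep = cfg.deepcopy()
assert deep is not cfg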

items #

items()
Source code in autogen/llm_config.py
def items(self) -> Iterable[tuple[str, Any]]:
    d = self.model_dump()
    return d.items()

keys #

keys()
Source code in autogen/llm_config.py
def keys(self) -> Iterable[str]:
    d = self.model_dump()
    return d.keys()

values #

values()
Source code in autogen/llm_config.py
def values(self) -> Iterable[Any]:
    d = self.model_dump()
    return d.values()
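
items, keys, and values all go through model_dump(), so the config behaves like a read-only mapping over its non-empty fields:

cfg = LLMConfig(model="gpt-4o-mini", api_type="openai")
for key, value in cfg.items():
    print(key, value)
list(cfg.keys())    # e.g. ["config_list"]
list(cfg.values())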