Skip to content

WebSearchPreviewTool

autogen.tools.experimental.WebSearchPreviewTool #

WebSearchPreviewTool(*, llm_config, search_context_size='medium', user_location=None, instructions=None, text_format=None)

Bases: Tool

WebSearchPreviewTool is a tool that uses OpenAI's web_search_preview tool to perform a search.

Initialize the WebSearchPreviewTool.

PARAMETER DESCRIPTION
llm_config

The LLM configuration to use. This should be a dictionary containing the model name and other parameters.

TYPE: Union[LLMConfig, dict[str, Any]]

search_context_size

The size of the search context. One of low, medium, or high. medium is the default.

TYPE: Literal['low', 'medium', 'high'] DEFAULT: 'medium'

user_location

The location of the user. This should be a dictionary containing the city, country, region, and timezone.

TYPE: Optional[dict[str, str]] DEFAULT: None

instructions

Inserts a system (or developer) message as the first item in the model's context.

TYPE: Optional[str] DEFAULT: None

text_format

The format of the text to be returned. This should be a subclass of BaseModel. The default is None, which means the text will be returned as a string.

TYPE: Optional[Type[BaseModel]] DEFAULT: None

Source code in autogen/tools/experimental/web_search_preview/web_search_preview.py
def __init__(
    self,
    *,
    llm_config: Union[LLMConfig, dict[str, Any]],
    search_context_size: Literal["low", "medium", "high"] = "medium",
    user_location: Optional[dict[str, str]] = None,
    instructions: Optional[str] = None,
    text_format: Optional[Type[BaseModel]] = None,
):
    """Initialize the WebSearchPreviewTool.

    Args:
        llm_config: The LLM configuration to use. This should be a dictionary
            containing the model name and other parameters.
        search_context_size: The size of the search context. One of `low`, `medium`, or `high`.
            `medium` is the default.
        user_location: The location of the user. This should be a dictionary containing
            the city, country, region, and timezone.
        instructions: Inserts a system (or developer) message as the first item in the model's context.
        text_format: The format of the text to be returned. This should be a subclass of `BaseModel`.
            The default is `None`, which means the text will be returned as a string.

    Raises:
        ValueError: If `llm_config` has no `config_list` key, or if no OpenAI
            model starting with "gpt-4" is found in it.
    """
    self.web_search_tool_param = WebSearchToolParam(
        type="web_search_preview",
        search_context_size=search_context_size,
        user_location=UserLocation(**user_location) if user_location else None,  # type: ignore[typeddict-item]
    )
    self.instructions = instructions
    self.text_format = text_format

    if isinstance(llm_config, LLMConfig):
        llm_config = llm_config.model_dump()

    # Deep-copy so nothing below can mutate the caller's configuration.
    llm_config = copy.deepcopy(llm_config)

    if "config_list" not in llm_config:
        raise ValueError("llm_config must contain 'config_list' key")

    # Find first OpenAI model which starts with "gpt-4"; other models do not
    # support the web_search_preview tool.
    self.model = None
    self.api_key = None
    for model in llm_config["config_list"]:
        if model["model"].startswith("gpt-4") and model.get("api_type", "openai") == "openai":
            self.model = model["model"]
            self.api_key = model.get("api_key", os.getenv("OPENAI_API_KEY"))
            break
    if self.model is None:
        raise ValueError(
            "No OpenAI model starting with 'gpt-4' found in llm_config, other models do not support web_search_preview"
        )

    # str.startswith accepts a tuple of prefixes; equivalent to the two
    # separate negated checks but in a single call.
    if not self.model.startswith(("gpt-4.1", "gpt-4o-search-preview")):
        logging.warning(
            f"We recommend using a model starting with 'gpt-4.1' or 'gpt-4o-search-preview' for web_search_preview, but found {self.model}. "
            "This may result in suboptimal performance."
        )

    def web_search_preview(
        query: Annotated[str, "The search query. Add all relevant context to the query."],
    ) -> Union[str, Optional[BaseModel]]:
        # Bug fix: pass the API key selected from config_list to the client.
        # Previously `OpenAI()` ignored it and relied solely on the
        # OPENAI_API_KEY environment variable. Passing None preserves the old
        # env-var fallback behavior.
        client = OpenAI(api_key=self.api_key)

        if not self.text_format:
            # Plain-text result requested: return the raw output text.
            response = client.responses.create(
                model=self.model,  # type: ignore[arg-type]
                tools=[self.web_search_tool_param],
                input=query,
                instructions=self.instructions,
            )
            return response.output_text

        # Structured result requested: parse into the given BaseModel subclass.
        response = client.responses.parse(
            model=self.model,  # type: ignore[arg-type]
            tools=[self.web_search_tool_param],
            input=query,
            instructions=self.instructions,
            text_format=self.text_format,
        )
        return response.output_parsed

    super().__init__(
        name="web_search_preview",
        description="Tool used to perform a web search. It can be used as google search or directly searching a specific website.",
        func_or_tool=web_search_preview,
    )

name property #

name

description property #

description

func property #

func

tool_schema property #

tool_schema

Get the schema for the tool.

This is the preferred way of handling function calls with OpenAI and compatible frameworks.

function_schema property #

function_schema

Get the schema for the function.

This is the old way of handling function calls with OpenAI and compatible frameworks. It is provided for backward compatibility.

realtime_tool_schema property #

realtime_tool_schema

Get the schema for the tool.

This is the preferred way of handling function calls with OpenAI and compatible frameworks.

web_search_tool_param instance-attribute #

web_search_tool_param = WebSearchToolParam(type='web_search_preview', search_context_size=search_context_size, user_location=UserLocation(**user_location) if user_location else None)

instructions instance-attribute #

instructions = instructions

text_format instance-attribute #

text_format = text_format

model instance-attribute #

model = None

api_key instance-attribute #

api_key = None

register_for_llm #

register_for_llm(agent)

Registers the tool for use with a ConversableAgent's language model (LLM).

This method registers the tool so that it can be invoked by the agent during interactions with the language model.

PARAMETER DESCRIPTION
agent

The agent to which the tool will be registered.

TYPE: ConversableAgent

Source code in autogen/tools/tool.py
def register_for_llm(self, agent: "ConversableAgent") -> None:
    """Registers the tool for use with a ConversableAgent's language model (LLM).

    This method registers the tool so that it can be invoked by the agent during
    interactions with the language model.

    Args:
        agent (ConversableAgent): The agent to which the tool will be registered.
    """
    schema = self._func_schema
    if not schema:
        # No explicit schema: let the agent's decorator derive one from the tool.
        agent.register_for_llm()(self)
        return
    agent.update_tool_signature(schema, is_remove=False)

register_for_execution #

register_for_execution(agent)

Registers the tool for direct execution by a ConversableAgent.

This method registers the tool so that it can be executed by the agent, typically outside of the context of an LLM interaction.

PARAMETER DESCRIPTION
agent

The agent to which the tool will be registered.

TYPE: ConversableAgent

Source code in autogen/tools/tool.py
def register_for_execution(self, agent: "ConversableAgent") -> None:
    """Registers the tool for direct execution by a ConversableAgent.

    This method registers the tool so that it can be executed by the agent,
    typically outside of the context of an LLM interaction.

    Args:
        agent (ConversableAgent): The agent to which the tool will be registered.
    """
    # Obtain the agent's registration decorator and apply it to this tool.
    decorator = agent.register_for_execution()
    decorator(self)

register_tool #

register_tool(agent)

Register a tool to be both proposed and executed by an agent.

Equivalent to calling both register_for_llm and register_for_execution with the same agent.

Note: This will not make the agent recommend and execute the call in the one step. If the agent recommends the tool, it will need to be the next agent to speak in order to execute the tool.

PARAMETER DESCRIPTION
agent

The agent to which the tool will be registered.

TYPE: ConversableAgent

Source code in autogen/tools/tool.py
def register_tool(self, agent: "ConversableAgent") -> None:
    """Register a tool to be both proposed and executed by an agent.

    Equivalent to calling both `register_for_llm` and `register_for_execution` with the same agent.

    Note: This will not make the agent recommend and execute the call in the one step. If the agent
    recommends the tool, it will need to be the next agent to speak in order to execute the tool.

    Args:
        agent (ConversableAgent): The agent to which the tool will be registered.
    """
    # LLM registration first, then execution registration — same order callers rely on.
    for register in (self.register_for_llm, self.register_for_execution):
        register(agent)