Skip to content

GoogleSearchTool

autogen.tools.experimental.GoogleSearchTool #

GoogleSearchTool(*, search_api_key=None, search_engine_id=None, use_internal_llm_tool_if_available=True)

Bases: Tool

GoogleSearchTool is a tool that uses the Google Search API to perform a search.

PARAMETER DESCRIPTION
search_api_key

The API key for the Google Search API.

TYPE: Optional[str] DEFAULT: None

search_engine_id

The search engine ID for the Google Search API.

TYPE: Optional[str] DEFAULT: None

use_internal_llm_tool_if_available

Whether to use the predefined (e.g. Gemini GenAI) search tool. Currently, this can only be used for agents with the Gemini (GenAI) configuration.

TYPE: bool DEFAULT: True

Source code in autogen/tools/experimental/google_search/google_search.py
def __init__(
    self,
    *,
    search_api_key: Optional[str] = None,
    search_engine_id: Optional[str] = None,
    use_internal_llm_tool_if_available: bool = True,
):
    """GoogleSearchTool is a tool that uses the Google Search API to perform a search.

    Args:
        search_api_key: The API key for the Google Search API.
        search_engine_id: The search engine ID for the Google Search API.
        use_internal_llm_tool_if_available: Whether to use the predefined
            (e.g. Gemini GenAI) search tool. Currently, this can only be used
            for agents with the Gemini (GenAI) configuration.

    Raises:
        ValueError: If `use_internal_llm_tool_if_available` is False and either
            `search_api_key` or `search_engine_id` is not provided.
    """
    self.search_api_key = search_api_key
    self.search_engine_id = search_engine_id
    self.use_internal_llm_tool_if_available = use_internal_llm_tool_if_available

    # Without the provider's built-in search tool, both credentials are mandatory
    # up front so the tool never fails lazily at call time.
    if not use_internal_llm_tool_if_available and (search_api_key is None or search_engine_id is None):
        raise ValueError(
            "search_api_key and search_engine_id must be provided if use_internal_llm_tool_if_available is False"
        )

    # Credentials are accepted but unused when the internal LLM tool takes over;
    # warn so the caller knows they may be ignored.
    if use_internal_llm_tool_if_available and (search_api_key is not None or search_engine_id is not None):
        logging.warning("search_api_key and search_engine_id will be ignored if internal LLM tool is available")

    # The credentials are injected via Depends(on(...)) so they are bound at
    # construction time and hidden from the LLM-facing tool signature.
    def google_search(
        query: Annotated[str, "The search query."],
        search_api_key: Annotated[Optional[str], Depends(on(search_api_key))],
        search_engine_id: Annotated[Optional[str], Depends(on(search_engine_id))],
        num_results: Annotated[int, "The number of results to return."] = 10,
    ) -> list[dict[str, Any]]:
        # Reached only when the internal LLM tool is unavailable and no
        # credentials were supplied at construction.
        if search_api_key is None or search_engine_id is None:
            raise ValueError(
                "Your LLM is not configured to use prebuilt google-search tool.\n"
                "Please provide search_api_key and search_engine_id.\n"
            )
        return _google_search(query, search_api_key, search_engine_id, num_results)

    super().__init__(
        # GeminiClient will look for a tool with the name "prebuilt_google_search"
        name="prebuilt_google_search" if use_internal_llm_tool_if_available else "google_search",
        description="Use the Google Search API to perform a search.",
        func_or_tool=google_search,
    )

name property #

name

description property #

description

func property #

func

tool_schema property #

tool_schema

Get the schema for the tool.

This is the preferred way of handling function calls with OpenAI and compatible frameworks.

function_schema property #

function_schema

Get the schema for the function.

This is the old way of handling function calls with OpenAI and compatible frameworks. It is provided for backward compatibility.

realtime_tool_schema property #

realtime_tool_schema

Get the schema for the tool.

This is the preferred way of handling function calls with OpenAI and compatible frameworks.

search_api_key instance-attribute #

search_api_key = search_api_key

search_engine_id instance-attribute #

search_engine_id = search_engine_id

use_internal_llm_tool_if_available instance-attribute #

use_internal_llm_tool_if_available = use_internal_llm_tool_if_available

register_for_llm #

register_for_llm(agent)

Registers the tool for use with a ConversableAgent's language model (LLM).

This method registers the tool so that it can be invoked by the agent during interactions with the language model.

PARAMETER DESCRIPTION
agent

The agent to which the tool will be registered.

TYPE: ConversableAgent

Source code in autogen/tools/tool.py
def register_for_llm(self, agent: "ConversableAgent") -> None:
    """Register the tool with a ConversableAgent's language model (LLM).

    After registration the agent's LLM can propose calls to this tool during
    conversations.

    Args:
        agent (ConversableAgent): The agent to which the tool will be registered.
    """
    schema = self._func_schema
    # No explicit schema: fall back to the decorator-based registration path.
    if not schema:
        agent.register_for_llm()(self)
        return
    # An explicit function schema was provided; push it onto the agent directly.
    agent.update_tool_signature(schema, is_remove=False)

register_for_execution #

register_for_execution(agent)

Registers the tool for direct execution by a ConversableAgent.

This method registers the tool so that it can be executed by the agent, typically outside of the context of an LLM interaction.

PARAMETER DESCRIPTION
agent

The agent to which the tool will be registered.

TYPE: ConversableAgent

Source code in autogen/tools/tool.py
def register_for_execution(self, agent: "ConversableAgent") -> None:
    """Register the tool for direct execution by a ConversableAgent.

    Once registered, the agent can run this tool itself, typically outside of
    an LLM interaction.

    Args:
        agent (ConversableAgent): The agent to which the tool will be registered.
    """
    register = agent.register_for_execution()
    register(self)

register_tool #

register_tool(agent)

Register a tool to be both proposed and executed by an agent.

Equivalent to calling both register_for_llm and register_for_execution with the same agent.

Note: This will not make the agent recommend and execute the call in the one step. If the agent recommends the tool, it will need to be the next agent to speak in order to execute the tool.

PARAMETER DESCRIPTION
agent

The agent to which the tool will be registered.

TYPE: ConversableAgent

Source code in autogen/tools/tool.py
def register_tool(self, agent: "ConversableAgent") -> None:
    """Register a tool to be both proposed and executed by an agent.

    Equivalent to calling both `register_for_llm` and `register_for_execution`
    with the same agent.

    Note: This will not make the agent recommend and execute the call in the one
    step. If the agent recommends the tool, it will need to be the next agent to
    speak in order to execute the tool.

    Args:
        agent (ConversableAgent): The agent to which the tool will be registered.
    """
    # LLM registration first, then execution registration — same order as
    # calling the two methods individually.
    for register in (self.register_for_llm, self.register_for_execution):
        register(agent)