templates: add gemini functions agent (#17141)

Co-authored-by: Erick Friis <erick@langchain.dev>
Harrison Chase 4 months ago committed by GitHub
parent aeb6b38901
commit 19546081c6

@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2023 LangChain, Inc.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

@@ -0,0 +1,76 @@
# gemini-functions-agent
This template creates an agent that uses Google Gemini function calling to communicate its decisions on what actions to take.

The example agent can optionally look up information on the internet using Tavily's search engine.
## Environment Setup
The following environment variables need to be set.

Set the `TAVILY_API_KEY` environment variable so the agent can use Tavily's search engine.
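For example, in your shell:
```shell
export TAVILY_API_KEY=<your-tavily-api-key>
```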
You will also need to authenticate with Google:
```shell
gcloud auth application-default login
```
## Usage
To use this package, you should first have the LangChain CLI installed:
```shell
pip install -U langchain-cli
```
To create a new LangChain project and install this as the only package, you can do:
```shell
langchain app new my-app --package gemini-functions-agent
```
If you want to add this to an existing project, you can just run:
```shell
langchain app add gemini-functions-agent
```
And add the following code to your `server.py` file:
```python
from gemini_functions_agent import agent_executor as gemini_functions_agent_chain
add_routes(app, gemini_functions_agent_chain, path="/gemini-functions-agent")
```
(Optional) Let's now configure LangSmith.
LangSmith will help us trace, monitor, and debug LangChain applications.
LangSmith is currently in private beta; you can sign up [here](https://smith.langchain.com/).
If you don't have access, you can skip this section.
```shell
export LANGCHAIN_TRACING_V2=true
export LANGCHAIN_API_KEY=<your-api-key>
export LANGCHAIN_PROJECT=<your-project> # if not specified, defaults to "default"
```
If you are inside this directory, then you can spin up a LangServe instance directly by running:
```shell
langchain serve
```
This will start the FastAPI app with a server running locally at
[http://localhost:8000](http://localhost:8000)
We can see all templates at [http://127.0.0.1:8000/docs](http://127.0.0.1:8000/docs)
We can access the playground at [http://127.0.0.1:8000/gemini-functions-agent/playground](http://127.0.0.1:8000/gemini-functions-agent/playground)
We can access the template from code with:
```python
from langserve.client import RemoteRunnable
runnable = RemoteRunnable("http://localhost:8000/gemini-functions-agent")
```
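As a minimal sketch of calling it, we can pass an input together with an (initially empty) chat history, matching the `AgentInput` schema the template exposes:
```python
result = runnable.invoke(
    {"input": "who won the women's world cup in 2023?", "chat_history": []}
)
print(result)
```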

@@ -0,0 +1,3 @@
from gemini_functions_agent.agent import agent_executor
__all__ = ["agent_executor"]

@@ -0,0 +1,81 @@
from typing import List, Tuple
from langchain.agents import AgentExecutor
from langchain.agents.format_scratchpad import format_to_openai_function_messages
from langchain.agents.output_parsers import OpenAIFunctionsAgentOutputParser
from langchain.utilities.tavily_search import TavilySearchAPIWrapper
from langchain_community.tools.tavily_search import TavilySearchResults
from langchain_core.messages import AIMessage, HumanMessage
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.pydantic_v1 import BaseModel, Field
from langchain_google_genai import ChatGoogleGenerativeAI
# Create the Tavily search tool
search = TavilySearchAPIWrapper()
description = """A search engine optimized for comprehensive, accurate, \
and trusted results. Useful for when you need to answer questions \
about current events or about recent information. \
Input should be a search query. \
If the user is asking about something that you don't know about, \
you should probably use this tool to see if that can provide any information."""
tavily_tool = TavilySearchResults(api_wrapper=search, description=description)

tools = [tavily_tool]

# Gemini chat model used for function calling
llm = ChatGoogleGenerativeAI(temperature=0, model="gemini-pro")

# Prompt: prior chat history, the new user input, then the agent scratchpad
prompt = ChatPromptTemplate.from_messages(
    [
        MessagesPlaceholder(variable_name="chat_history"),
        ("user", "{input}"),
        MessagesPlaceholder(variable_name="agent_scratchpad"),
    ]
)

# Expose the search tool to the model as a callable function schema
llm_with_tools = llm.bind(
    functions=[
        {
            "name": tavily_tool.name,
            "description": tavily_tool.description,
            "parameters": {
                "type": "object",
                "properties": {"query": {"type": "string"}},
                "required": ["query"],
            },
        }
    ]
)


def _format_chat_history(chat_history: List[Tuple[str, str]]):
    """Convert (human, ai) message tuples into LangChain message objects."""
    buffer = []
    for human, ai in chat_history:
        buffer.append(HumanMessage(content=human))
        buffer.append(AIMessage(content=ai))
    return buffer


# Assemble the agent: map the inputs, render the prompt, call the model,
# and parse the function-call output into agent actions.
agent = (
    {
        "input": lambda x: x["input"],
        "chat_history": lambda x: _format_chat_history(x["chat_history"]),
        "agent_scratchpad": lambda x: format_to_openai_function_messages(
            x["intermediate_steps"]
        ),
    }
    | prompt
    | llm_with_tools
    | OpenAIFunctionsAgentOutputParser()
)


class AgentInput(BaseModel):
    input: str
    chat_history: List[Tuple[str, str]] = Field(
        ..., extra={"widget": {"type": "chat", "input": "input", "output": "output"}}
    )


agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True).with_types(
    input_type=AgentInput
)

@@ -0,0 +1,5 @@
from gemini_functions_agent.agent import agent_executor

if __name__ == "__main__":
    question = "who won the women's world cup in 2023?"
    print(agent_executor.invoke({"input": question, "chat_history": []}))

File diff suppressed because it is too large

@@ -0,0 +1,33 @@
[tool.poetry]
name = "gemini-functions-agent"
version = "0.1.0"
description = "Agent using Gemini function calling to execute functions, including search"
authors = [
    "Harrison Chase",
]
readme = "README.md"

[tool.poetry.dependencies]
python = ">=3.9,<4.0"
langchain = "^0.1"
tavily-python = "^0.1.9"
langchain-google-genai = ">=0.0.7,<0.1"

[tool.poetry.group.dev.dependencies]
langchain-cli = ">=0.0.21"

[tool.langserve]
export_module = "gemini_functions_agent"
export_attr = "agent_executor"

[tool.templates-hub]
use-case = "research"
author = "LangChain"
integrations = ["Google", "Tavily"]
tags = ["search", "agents", "function-calling"]

[build-system]
requires = [
    "poetry-core",
]
build-backend = "poetry.core.masonry.api"