This gist contains LangChain-related references.
from langchain_ollama.chat_models import ChatOllama
from langchain_core.tools import tool
from typing import Any, Dict, Optional, TypedDict
from langchain_core.runnables import RunnableConfig
from langchain_core.output_parsers import JsonOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.tools import render_text_description

# Local model served by Ollama; a low temperature keeps the JSON output stable.
model = ChatOllama(model='gemma2:2b', temperature=0.3)


@tool
def multiply(x: float, y: float) -> float:
    """Multiply two numbers together."""
    return x * y


@tool
def add(x: int, y: int) -> int:
    """Add two numbers."""
    return x + y


class ToolCallRequest(TypedDict):
    """A typed dict that shows the inputs into the invoke_tool function."""

    name: str
    arguments: Dict[str, Any]


def invoke_tool(tool_call_request: ToolCallRequest, config: Optional[RunnableConfig] = None):
    """Look up the requested tool by name and invoke it with the parsed arguments."""
    tool_name_to_tool = {tool.name: tool for tool in tools}
    name = tool_call_request["name"]
    requested_tool = tool_name_to_tool[name]
    return requested_tool.invoke(tool_call_request["arguments"], config=config)


tools = [multiply, add]

# Render the tool names and descriptions as plain text for the system prompt.
rendered_tools = render_text_description(tools)

system_prompt = f"""\
You are an assistant that has access to the following set of tools.
Here are the names and descriptions for each tool:

{rendered_tools}

Given the user input, return the name and input of the tool to use.
Return your response as a JSON blob with 'name' and 'arguments' keys.

The `arguments` should be a dictionary, with keys corresponding
to the argument names and the values corresponding to the requested values.
"""

prompt = ChatPromptTemplate.from_messages(
    [("system", system_prompt), ("user", "{input}")]
)

# Prompt the model, parse its JSON tool call, then dispatch to the matching tool.
chain = prompt | model | JsonOutputParser() | invoke_tool

response = chain.invoke({"input": "what's thirteen times 4"})
response
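
For reference, a minimal sketch (not part of the original gist) of what the chain is expected to produce for this input, assuming gemma2:2b emits a well-formed JSON blob; small local models can occasionally return malformed JSON, so the exact output may vary:

# Inspect the intermediate JSON tool call before dispatching (sketch, assumes
# the model follows the prompt format for this input):
tool_call = (prompt | model | JsonOutputParser()).invoke({"input": "what's thirteen times 4"})
# Expected shape: {"name": "multiply", "arguments": {"x": 13, "y": 4}}
invoke_tool(tool_call)  # -> 52.0 if the model picked multiply with x=13, y=4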