How to configure LangChain to use the Alibaba Cloud Qwen LLM model
# Because the Alibaba Cloud (DashScope) LLM API is OpenAI-compatible,
# we can configure its model just like a gpt-x model, as shown below.
from langgraph.prebuilt import create_react_agent
from langchain.chat_models import init_chat_model
from dotenv import load_dotenv
import os

# Load ALI_DASHSCOPE_API_KEY from the .env file into the environment.
load_dotenv()

def get_weather(city: str) -> str:
    """Get weather for a given city."""
    return f"It's always sunny in {city}!"

# Use the OpenAI-compatible provider, but point it at the DashScope endpoint.
model = init_chat_model(
    "openai:qwen-plus",
    api_key=os.getenv("ALI_DASHSCOPE_API_KEY"),
    base_url="https://dashscope.aliyuncs.com/compatible-mode/v1"
)

agent = create_react_agent(
    model=model,
    tools=[get_weather],
    prompt="You are a helpful assistant"
)

# Run the agent
agent.invoke(
    {"messages": [{"role": "user", "content": "what is the weather in sf"}]}
)
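The invoke call returns the final agent state. As a minimal sketch (assuming the default create_react_agent state, where the conversation is kept under "messages" and the assistant's final answer is the last entry), you can capture and print the reply like this:

result = agent.invoke(
    {"messages": [{"role": "user", "content": "what is the weather in sf"}]}
)
# The last message in the returned state is the assistant's final answer.
print(result["messages"][-1].content)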
You need an API key from Alibaba Cloud and must put it in a .env file as ALI_DASHSCOPE_API_KEY.
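For example, the .env file would contain a single line like the one below (the key value here is a placeholder, not a real key):

ALI_DASHSCOPE_API_KEY=sk-xxxxxxxxxxxxxxxx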