LangGraph adapter
This commit is contained in:
135
langgraph_qwen/factory.py
Normal file
135
langgraph_qwen/factory.py
Normal file
@@ -0,0 +1,135 @@
|
||||
import os
|
||||
from typing import Any, Dict, List, Optional
|
||||
from .utils import ensure_env_loaded
|
||||
|
||||
|
||||
def _env(var: str, default: Optional[str] = None) -> Optional[str]:
|
||||
v = os.getenv(var)
|
||||
return v if v is not None and v != "" else default
|
||||
|
||||
|
||||
def get_qwen_chat_model(
    *,
    model: Optional[str] = None,
    api_key: Optional[str] = None,
    base_url: Optional[str] = None,
    temperature: Optional[float] = None,
    max_tokens: Optional[int] = None,
    extra: Optional[Dict[str, Any]] = None,
    prefer: str = "openai_compat",
):
    """
    Return a LangChain ChatModel configured for Qwen3-Coder(-Flash).

    Tries the OpenAI-compatible client (langchain_openai.ChatOpenAI) by default.
    If unavailable and prefer != 'openai_compat', will try DashScope
    (langchain_community.chat_models.tongyi.ChatTongyi).

    This function does not import heavy deps at module import time; imports are
    local to keep repo light.

    Args:
        model: Model name; falls back to QWEN_MODEL, then "qwen3-coder-flash".
        api_key: API key; falls back to QWEN_API_KEY, GPUSTACK_API_KEY,
            OPENAI_API_KEY, then DASHSCOPE_API_KEY.
        base_url: Endpoint override; falls back to QWEN_BASE_URL, then
            OPENAI_BASE_URL.
        temperature: Optional sampling temperature forwarded to the model.
        max_tokens: Optional generation cap forwarded to the model.
        extra: Extra keyword args merged into the model constructor kwargs.
        prefer: "openai_compat" (default), "custom" for the in-repo adapter,
            or any other value to go straight to the DashScope path.

    Raises:
        RuntimeError: if no usable backend could be initialized.
    """
    # Load .env BEFORE reading any environment variables; the original read
    # QWEN_MODEL first, so a model name defined only in .env was ignored.
    ensure_env_loaded()

    model = model or _env("QWEN_MODEL", "qwen3-coder-flash")
    api_key = (
        api_key
        or _env("QWEN_API_KEY")
        or _env("GPUSTACK_API_KEY")
        or _env("OPENAI_API_KEY")
        or _env("DASHSCOPE_API_KEY")
    )
    base_url = base_url or _env("QWEN_BASE_URL") or _env("OPENAI_BASE_URL")

    # Common generation params shared by all backends.
    gen_kwargs: Dict[str, Any] = {}
    if temperature is not None:
        gen_kwargs["temperature"] = temperature
    if max_tokens is not None:
        gen_kwargs["max_tokens"] = max_tokens
    if extra:
        gen_kwargs.update(extra)

    err: Optional[Exception] = None

    if prefer == "custom":
        try:
            from .chat_model import ChatQwenOpenAICompat

            # api_key was already fully resolved above; the original
            # re-read the same env vars here redundantly.
            return ChatQwenOpenAICompat(
                model=model,
                api_key=api_key,
                base_url=base_url,
                **gen_kwargs,
            )
        except Exception as e:
            # Chain the cause so the original traceback is preserved.
            raise RuntimeError(f"Custom adapter init failed: {e}") from e

    if prefer == "openai_compat":
        try:
            from langchain_openai import ChatOpenAI  # type: ignore

            if api_key is None:
                raise ValueError("QWEN_API_KEY/OPENAI_API_KEY/DASHSCOPE_API_KEY is required")

            # ChatOpenAI supports base_url override for OpenAI-compatible servers
            return ChatOpenAI(
                model=model,
                api_key=api_key,
                base_url=base_url,
                **gen_kwargs,
            )
        except Exception as e:
            # Remember the failure; we still try the DashScope fallback below.
            err = e

    # Fallback: DashScope (Tongyi) path via community provider
    try:
        from langchain_community.chat_models.tongyi import ChatTongyi  # type: ignore

        # ChatTongyi uses DASHSCOPE_API_KEY from env by default; api_key arg optional
        return ChatTongyi(
            model=model,
            dashscope_api_key=api_key,
            **gen_kwargs,
        )
    except Exception as e2:
        raise RuntimeError(
            "Failed to create Qwen ChatModel. Tried OpenAI-compatible and DashScope paths.\n"
            f"Prefer='{prefer}' error: {err}\n"
            f"DashScope path error: {e2}"
        ) from e2
|
||||
|
||||
|
||||
def bind_qwen_tools(
    model, tools: List[Any], *, tool_choice: Optional[str] = None
):
    """
    Bind tools to a LangChain ChatModel with Qwen-friendly defaults.

    Args:
        model: A chat model exposing LangChain's ``.bind_tools(...)``.
        tools: List of BaseTool instances or ``@tool``-decorated functions.
        tool_choice: 'auto' | 'required' | tool-name | None.

    Returns:
        A model instance with the tools bound.
    """
    # Rely on LangChain's native tool-schema handling; this wrapper stays thin.
    tooled = model.bind_tools(tools)
    if tool_choice is None:
        return tooled
    # Providers differ on how tool_choice is accepted: try the direct
    # .bind(tool_choice=...) first, then fall back to extra_body kwargs.
    try:
        return tooled.bind(tool_choice=tool_choice)
    except Exception:
        return tooled.bind(extra_body={"tool_choice": tool_choice})
|
||||
Reference in New Issue
Block a user