from typing import TypedDict
from langgraph.graph import StateGraph, START
from langchain_core.messages import HumanMessage
from langgraph_qwen import ChatQwenOpenAICompat
class SimpleState(TypedDict):
    """Graph state: the input topic and the joke generated from it."""

    # Subject to joke about; supplied as the graph's input.
    topic: str
    # LLM-generated joke; written by the call_model node.
    joke: str
# Shared chat-model client used by the graph's node(s).
# temperature=0 — presumably for reproducible joke output; confirm with the
# ChatQwenOpenAICompat docs if determinism matters here.
model = ChatQwenOpenAICompat(temperature=0)
def call_model(state: SimpleState):
    """Graph node: ask the LLM for a joke about ``state['topic']``.

    Returns a partial state update containing only the ``joke`` key.
    """
    prompt = HumanMessage(content=f"Generate a joke about {state['topic']}")
    response = model.invoke([prompt])
    return {"joke": response.content}
# Build the one-node graph: START -> call_model.
_builder = StateGraph(SimpleState)
_builder.add_node("call_model", call_model)
_builder.add_edge(START, "call_model")
graph = _builder.compile()
def main():
    """Run the graph once and stream LLM tokens to stdout as they arrive.

    Uses stream_mode="messages", which yields (message_chunk, metadata)
    pairs; each chunk's content is printed prefixed with the node name.
    """
    banner = "=" * 60
    print(banner)
    print("💬 LangGraph stream_mode='messages' 示例(捕获 LLM tokens)")
    print(banner)
    for msg, meta in graph.stream({"topic": "cats"}, stream_mode="messages"):
        content = getattr(msg, "content", None)
        # Skip chunks with no content (e.g. empty deltas).
        if content:
            node = meta.get("langgraph_node", "unknown")
            print(f"[{node}] {content}", end="", flush=True)
    print("\n\n✅ 完成!")
if __name__ == "__main__":
    # Run the streaming demo when executed as a script.
    main()