Skip to content

Commit 576f302

Browse files
committed
Enabled LangGraph test client to use json.dumps for formatted output
1 parent 84b4f21 commit 576f302

File tree

1 file changed

+16
-4
lines changed

1 file changed

+16
-4
lines changed

‎samples/python/agents/langgraph/app/test_client.py‎

Lines changed: 16 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
1+
import json
12
import logging
23

34
from typing import Any
@@ -125,7 +126,9 @@ async def main() -> None:
125126
)
126127

127128
response = await client.send_message(request)
128-
print(response.model_dump(mode='json', exclude_none=True))
129+
print("=== Send Message Response ===")
130+
print(json.dumps(response.model_dump(mode='json', exclude_none=True), indent=2))
131+
print("=" * 50)
129132
# --8<-- [end:send_message]
130133

131134
# --8<-- [start:Multiturn]
@@ -147,7 +150,9 @@ async def main() -> None:
147150
)
148151

149152
response = await client.send_message(request)
150-
print(response.model_dump(mode='json', exclude_none=True))
153+
print("=== Multi-turn First Response ===")
154+
print(json.dumps(response.model_dump(mode='json', exclude_none=True), indent=2))
155+
print("=" * 50)
151156

152157
task_id = response.root.result.id
153158
context_id = response.root.result.context_id
@@ -168,7 +173,9 @@ async def main() -> None:
168173
)
169174

170175
second_response = await client.send_message(second_request)
171-
print(second_response.model_dump(mode='json', exclude_none=True))
176+
print("=== Multi-turn Second Response ===")
177+
print(json.dumps(second_response.model_dump(mode='json', exclude_none=True), indent=2))
178+
print("=" * 50)
172179
# --8<-- [end:Multiturn]
173180

174181
# --8<-- [start:send_message_streaming]
@@ -179,8 +186,13 @@ async def main() -> None:
179186

180187
stream_response = client.send_message_streaming(streaming_request)
181188

189+
print("=== Streaming Response Chunks ===")
190+
chunk_count = 0
182191
async for chunk in stream_response:
183-
print(chunk.model_dump(mode='json', exclude_none=True))
192+
chunk_count += 1
193+
print(f"--- Chunk {chunk_count} ---")
194+
print(json.dumps(chunk.model_dump(mode='json', exclude_none=True), indent=2))
195+
print("=" * 50)
184196
# --8<-- [end:send_message_streaming]
185197

186198

0 commit comments

Comments
 (0)