#!/usr/bin/env python3
"""
Basic streaming example - shows text appearing in real-time.

This demonstrates the key benefit of streaming: users see responses
immediately as they're generated, rather than waiting for the complete
response.
"""

import asyncio
import os
import sys

# Add src to path for development
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'src'))

from claude import ClaudeAgentClient, AgentOptions


async def _stream_prompt(client, prompt: str) -> None:
    """Send *prompt* on *client* and print the streamed reply incrementally.

    Prints the user prompt, then each text delta the moment it arrives so
    the response appears in real time rather than all at once.

    Args:
        client: An open ClaudeAgentClient session.
        prompt: The user message to send.
    """
    print(f"\n💬 You: {prompt}")
    print("\n🤖 Claude: ", end='', flush=True)

    async for chunk in client.send_message_stream(prompt):
        if chunk.text_delta:
            # flush=True so each fragment is visible immediately
            print(chunk.text_delta, end='', flush=True)

    print("\n")  # New line after response


async def main() -> int:
    """Stream a simple two-turn conversation.

    Returns:
        Process exit code: 0 on success, 1 if the API key is missing.
    """
    print("=" * 60)
    print("Basic Streaming Example")
    print("=" * 60)

    # Fail fast with a helpful hint when no API key is configured
    if not os.getenv('ANTHROPIC_API_KEY'):
        print("\n❌ Error: ANTHROPIC_API_KEY environment variable not set")
        print("   Set your API key: export ANTHROPIC_API_KEY='sk-ant-...'")
        return 1

    # Create client with streaming enabled (default)
    options = AgentOptions(
        model="claude-sonnet-4-5-20250929",
        stream=True  # Streaming is default, but shown here for clarity
    )

    async with ClaudeAgentClient(options=options) as client:
        # Two turns in the same conversation: the second prompt refers back
        # to the first, demonstrating that context is preserved.
        await _stream_prompt(client, "Tell me a short joke about programming")
        await _stream_prompt(client, "Explain why that's funny")

    print("\n" + "=" * 60)
    print("✅ Streaming complete!")
    print("=" * 60)
    return 0


if __name__ == "__main__":
    try:
        sys.exit(asyncio.run(main()))
    except KeyboardInterrupt:
        # Conventional exit code for SIGINT (128 + 2)
        print("\n\n⚠️ Interrupted by user")
        sys.exit(130)
    except Exception as e:
        print(f"\n❌ Error: {e}")
        import traceback
        traceback.print_exc()
        sys.exit(1)