from openai import OpenAI
from memori import Memori

client = OpenAI()
mem = Memori().llm.register(client)
mem.attribution(entity_id="user-123", process_id="chat-app")

# Stream the response
stream = client.chat.completions.create(
    model="gpt-4o-mini",
    messages=[{"role": "user", "content": "Tell me about quantum computing"}],
    stream=True,
)

# Process chunks as they arrive.
full_response = ""
for chunk in stream:
    # Some streamed chunks can arrive with an empty `choices` list
    # (e.g. the trailing usage chunk when usage reporting is enabled),
    # and `delta.content` may be None — guard both before printing.
    if not chunk.choices:
        continue
    content = chunk.choices[0].delta.content
    if content:
        full_response += content
        print(content, end="", flush=True)
print()  # New line

# Memori automatically captures the complete conversation
mem.augmentation.wait()  # Wait for memory processing
import { OpenAI } from 'openai';
import { Memori } from 'memori';

const client = new OpenAI();
const mem = new Memori().llm.register(client);
mem.attribution('user-123', 'chat-app');

// Resolve after `ms` milliseconds.
const delay = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

/**
 * Streams a chat completion, echoing each token to stdout as it arrives.
 * The registered Memori client captures the full exchange automatically.
 */
async function streamResponse() {
  const stream = await client.chat.completions.create({
    model: 'gpt-4o-mini',
    messages: [{ role: 'user', content: 'Tell me about quantum computing' }],
    stream: true,
  });

  // Accumulate the full text while printing each delta immediately.
  let assembled = '';
  for await (const chunk of stream) {
    const piece = chunk.choices[0]?.delta?.content;
    if (piece) {
      assembled += piece;
      process.stdout.write(piece);
    }
  }
  console.log(); // New line

  // Memori automatically captures the complete conversation;
  // give background memory processing a moment to finish.
  await delay(1000);
}

streamResponse().catch(console.error);
from anthropic import Anthropic
from memori import Memori

client = Anthropic()
mem = Memori().llm.register(client)
mem.attribution(entity_id="user-789", process_id="claude-chat")

# Stream with Claude: the context manager yields text deltas via
# `text_stream`, which we echo to stdout as they arrive.
with client.messages.stream(
    model="claude-3-haiku-20240307",
    messages=[{"role": "user", "content": "Write a haiku about AI"}],
    max_tokens=100,
) as stream:
    for piece in stream.text_stream:
        print(piece, end="", flush=True)
print()

# Block until Memori has finished processing the captured conversation.
mem.augmentation.wait()
import Anthropic from '@anthropic-ai/sdk';
import { Memori } from 'memori';

const client = new Anthropic();
const mem = new Memori().llm.register(client);
mem.attribution('user-789', 'claude-chat');

// Resolve after `ms` milliseconds.
const delay = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

/**
 * Streams a short Claude completion and prints text deltas as they arrive.
 * The registered Memori client captures the conversation automatically.
 */
async function streamClaude() {
  const stream = await client.messages.create({
    model: 'claude-3-haiku-20240307',
    messages: [{ role: 'user', content: 'Write a haiku about AI' }],
    max_tokens: 100,
    stream: true,
  });

  for await (const evt of stream) {
    // Only text deltas carry printable content; skip every other event.
    if (evt.type !== 'content_block_delta') continue;
    if (evt.delta.type !== 'text_delta') continue;
    process.stdout.write(evt.delta.text);
  }
  console.log();

  // Give Memori's background capture a moment to complete.
  await delay(1000);
}

streamClaude().catch(console.error);