from memori import Memori
from openai import OpenAI

client = OpenAI()
mem = Memori().llm.register(client)

# Set attribution for a specific user and workflow
mem.attribution(
    entity_id="user-123",       # Unique user identifier
    process_id="support-chat",  # Workflow/application identifier
)

# All subsequent LLM calls will use this attribution
response = client.chat.completions.create(
    model="gpt-4o-mini",
    messages=[{"role": "user", "content": "I prefer email notifications"}],
)
import { Memori } from 'memori';import { OpenAI } from 'openai';const client = new OpenAI();const mem = new Memori().llm.register(client);// Set attribution for a specific user and workflowmem.attribution( 'user-123', // Unique user identifier 'support-chat' // Workflow/application identifier);// All subsequent LLM calls will use this attributionconst response = await client.chat.completions.create({ model: 'gpt-4o-mini', messages: [{ role: 'user', content: 'I prefer email notifications' }],});
Memori uses session IDs to group related conversations. Each session represents a distinct conversation thread.
Python
TypeScript
from memori import Memori
from openai import OpenAI

client = OpenAI()
mem = Memori().llm.register(client)
mem.attribution(entity_id="user-123")

# Start a new conversation (automatic session ID)
response1 = client.chat.completions.create(
    model="gpt-4o-mini",
    messages=[{"role": "user", "content": "I'm planning a trip to Tokyo"}],
)

# Save session ID for later
tokyo_session_id = mem.config.session_id
print(f"Session ID: {tokyo_session_id}")

# Start a completely new conversation
mem.new_session()
response2 = client.chat.completions.create(
    model="gpt-4o-mini",
    messages=[{"role": "user", "content": "I'm planning a trip to Paris"}],
)

# Resume the Tokyo conversation
mem.set_session(tokyo_session_id)
response3 = client.chat.completions.create(
    model="gpt-4o-mini",
    messages=[{"role": "user", "content": "What was I planning?"}],
)
# AI will remember: "You were planning a trip to Tokyo"
import { Memori } from 'memori';import { OpenAI } from 'openai';const client = new OpenAI();const mem = new Memori().llm.register(client);mem.attribution('user-123');// Start a new conversation (automatic session ID)const response1 = await client.chat.completions.create({ model: 'gpt-4o-mini', messages: [{ role: 'user', content: "I'm planning a trip to Tokyo" }],});// Save session ID for laterconst tokyoSessionId = mem.session.id;console.log(`Session ID: ${tokyoSessionId}`);// Start a completely new conversationmem.resetSession();const response2 = await client.chat.completions.create({ model: 'gpt-4o-mini', messages: [{ role: 'user', content: "I'm planning a trip to Paris" }],});// Resume the Tokyo conversationmem.setSession(tokyoSessionId);const response3 = await client.chat.completions.create({ model: 'gpt-4o-mini', messages: [{ role: 'user', content: 'What was I planning?' }],});// AI will remember: "You were planning a trip to Tokyo"