// langgraph_memory_basic.rs — basic long-term memory example for LangGraph.
// Originally from a fork of Abraxas-365/langchain-rust.
// (GitHub page chrome and the 141-line number gutter from the web extraction
// have been removed; only the example source follows.)
use langchain_ai_rust::langgraph::{
function_node_with_store, InMemorySaver, InMemoryStore, LangGraphError, MessagesState,
RunnableConfig, StateGraph, END, START,
};
use langchain_ai_rust::schemas::messages::Message;
/// Basic memory example for LangGraph
///
/// This example demonstrates:
/// 1. Using store for long-term memory across threads
/// 2. Accessing config and store in nodes
/// 3. Storing and retrieving user-specific memories
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
// Create a node that uses store for memory
let call_model = function_node_with_store(
"call_model",
|state: &MessagesState,
config: &RunnableConfig,
store: langchain_ai_rust::langgraph::StoreBox| {
let user_id = config.get_user_id().unwrap_or("default".to_string());
let last_message = state
.messages
.last()
.map(|m| m.content.clone())
.unwrap_or_default();
async move {
use std::collections::HashMap;
let namespace = ["memories", user_id.as_str()];
let last_msg_ref = last_message.as_str();
let memories = store
.search(&namespace, Some(last_msg_ref), Some(3))
.await
.map_err(|e| LangGraphError::ExecutionError(format!("Store error: {}", e)))?;
// Build context from memories
let memory_context: String = memories
.iter()
.map(|item| {
if let Some(data) = item.value.get("data") {
data.as_str().unwrap_or("").to_string()
} else {
String::new()
}
})
.collect::<Vec<_>>()
.join("\n");
// Check if user wants to remember something
let last_msg_lower = last_message.to_lowercase();
if last_msg_lower.contains("remember") {
// Extract memory from message (simplified)
let memory_text = last_message.replace("remember", "").trim().to_string();
if !memory_text.is_empty() {
let memory_id = format!("memory-{}", chrono::Utc::now().timestamp());
store
.put(
&namespace,
&memory_id,
serde_json::json!({"data": memory_text}),
)
.await
.map_err(|e| {
LangGraphError::ExecutionError(format!("Store error: {}", e))
})?;
}
}
// Generate response (simplified - in real app, call LLM)
let response_text = if !memory_context.is_empty() {
format!(
"Based on your memories: {}. Response to: {}",
memory_context, last_message
)
} else {
format!("Response to: {}", last_message)
};
let mut update = HashMap::new();
update.insert(
"messages".to_string(),
serde_json::to_value(vec![Message::new_ai_message(response_text)])?,
);
Ok(update)
}
},
);
// Build the graph
let mut graph = StateGraph::<MessagesState>::new();
graph.add_node("call_model", call_model)?;
graph.add_edge(START, "call_model");
graph.add_edge("call_model", END);
// Create checkpointer and store
let checkpointer = std::sync::Arc::new(InMemorySaver::new());
let store = std::sync::Arc::new(InMemoryStore::new());
// Compile with checkpointer and store
let compiled =
graph.compile_with_persistence(Some(checkpointer.clone()), Some(store.clone()))?;
// First conversation thread - user introduces themselves
println!("=== Thread 1: User introduces themselves ===");
let config1 = {
let mut cfg = RunnableConfig::with_thread_id("thread-1");
cfg.configurable
.insert("user_id".to_string(), serde_json::json!("user-123"));
cfg
};
let initial_state1 = MessagesState::with_messages(vec![Message::new_human_message(
"Hi! Remember: my name is Bob",
)]);
let result1 = compiled
.invoke_with_config(Some(initial_state1), &config1)
.await?;
println!("Response: {}", result1.messages.last().unwrap().content);
// Second conversation thread - same user, different thread
println!("\n=== Thread 2: Same user, different thread ===");
let config2 = {
let mut cfg = RunnableConfig::with_thread_id("thread-2");
cfg.configurable
.insert("user_id".to_string(), serde_json::json!("user-123"));
cfg
};
let initial_state2 =
MessagesState::with_messages(vec![Message::new_human_message("What is my name?")]);
let result2 = compiled
.invoke_with_config(Some(initial_state2), &config2)
.await?;
println!("Response: {}", result2.messages.last().unwrap().content);
Ok(())
}