-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathpython-client.py
More file actions
97 lines (76 loc) · 3.01 KB
/
python-client.py
File metadata and controls
97 lines (76 loc) · 3.01 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
"""Standalone Python client for engram.
Use this when building your own agent or application that needs
persistent memory without going through MCP. Talks directly to
the engram library.
"""
from engram.config import Config
from engram.store import Store, Memory, MemoryLayer, SourceType
from engram.embeddings import embed_documents, embed_query
from engram.retrieval import search as hybrid_search
from engram.surprise import compute_surprise, adjust_importance
from engram.entities import process_entities_for_memory
from engram.deep_retrieval import DeepReranker
from engram.lifecycle import apply_forgetting_curve, compute_retention
import uuid
def main():
    """End-to-end demo of the engram memory pipeline.

    Stores one memory (with surprise-adjusted importance), searches for
    it via hybrid search with the deep reranker, inspects retention of
    the top hit, trains the reranker if it has no weights yet, and runs
    a lifecycle (forgetting-curve) sweep. Progress is printed at each
    step. No arguments; no return value.
    """
    # --- setup ---
    config = Config.load()
    store = Store(config)
    store.init_db()
    try:
        # load the deep reranker (if trained); the weights file may not
        # exist yet — presumably DeepReranker tolerates a missing path
        # and reports is_trained=False (TODO confirm)
        reranker_path = config.resolved_db_path.parent / "reranker.npz"
        reranker = DeepReranker(model_path=reranker_path)

        # --- store a memory with surprise scoring ---
        content = "The deploy pipeline uses blue-green deployment with a 5-minute canary window"
        mem = Memory(
            id=str(uuid.uuid4()),
            content=content,
            source_type=SourceType.HUMAN,
            layer=MemoryLayer.PROCEDURAL,
            importance=0.8,
        )

        # embed; embed_documents may return an empty array (e.g. model
        # unavailable), in which case mem.embedding is never assigned
        emb = embed_documents([content], config.embedding_model)
        if emb.size > 0:
            mem.embedding = emb[0]
            # compute surprise before storing — only when an embedding
            # exists (the original called compute_surprise even when
            # mem.embedding had never been set)
            surprise = compute_surprise(mem.embedding, store)
            mem.importance = adjust_importance(mem.importance, surprise)
            mem.metadata["surprise"] = surprise["surprise"]
            print(f"Surprise: {surprise['surprise']:.3f}")
            print(f"Adjusted importance: {mem.importance:.3f}")
            if surprise["is_duplicate"]:
                print(f"Warning: near-duplicate of {surprise['nearest_id']}")

        # save the memory and extract its entities
        store.save_memory(mem)
        process_entities_for_memory(store, mem.id, content)
        print(f"Stored memory {mem.id}")

        # --- search with deep reranker ---
        results = hybrid_search(
            "deployment process",
            store, config,
            top_k=5,
            deep_reranker=reranker,
        )
        print("\nSearch results for 'deployment process':")
        for r in results:
            print(f" [{r.memory.layer}] {r.memory.content[:80]}...")
            print(f" score={r.score:.4f} sources={r.sources}")

        # --- check retention for the top result, if any ---
        for r in results[:1]:
            retention = compute_retention(r.memory, config)
            print(f"\n Retention score: {retention:.4f}")
            print(f" Access count: {r.memory.access_count}")
            print(f" Layer: {r.memory.layer}")

        # --- train the reranker (if enough data) ---
        if not reranker.is_trained:
            train_result = reranker.train(store, epochs=50)
            print(f"\nReranker training: {train_result}")

        # --- run lifecycle sweep ---
        stats = apply_forgetting_curve(store, config)
        print(f"\nLifecycle sweep: {stats}")

        # --- stats ---
        print(f"\nSystem stats: {store.get_stats()}")
    finally:
        # release DB resources even if any step above raised — the
        # original leaked the store on any exception
        store.close()
if __name__ == "__main__":
    main()