This video is a step-by-step, easy-to-follow tutorial on installing Mem0 locally and integrating it with a local Ollama model.
Code:
# Create an isolated conda environment with Python 3.11 and activate it.
conda create -n mem python=3.11 -y && conda activate mem
# Install PyTorch (required by the local embedding/transformer stack).
pip install torch
# Install/upgrade Hugging Face transformers plus tokenizer and loading helpers.
pip install -U transformers sentencepiece accelerate
# Sentence-transformers for local text embeddings.
pip install sentence_transformers
# Python client for the locally running Ollama server.
pip install ollama
# Mem0 memory layer itself.
pip install mem0ai
import os

from mem0 import Memory

# Mem0 checks for this variable even when the LLM provider is Ollama, so set
# a placeholder. NOTE(review): the default embedder may still be OpenAI —
# if embedding calls fail, add an "embedder" section to the config pointing
# at a local provider (confirm against the installed mem0 version).
os.environ["OPENAI_API_KEY"] = ""

# Route all LLM calls through the locally running Ollama server.
config = {
    "llm": {
        "provider": "ollama",
        "config": {
            "model": "llama3.1:latest",
            "temperature": 0.1,   # low temperature for deterministic extraction
            "max_tokens": 2000,
        }
    }
}

m = Memory.from_config(config)

# Store a memory for user "alice".
m.add("Likes to play cricket on weekends", user_id="alice", metadata={"category": "hobbies"})

# Get all memories.
all_memories = m.get_all()
print(all_memories)

# Derive the new memory's ID from the store instead of hard-coding a UUID
# copied from an earlier run (the original script reused a fixed ID that
# would not exist in a fresh store, so get/update/delete would fail).
# NOTE(review): the return shape of get_all() differs across mem0 versions
# (dict with a "results" key vs. a plain list) — adjust extraction if needed.
if isinstance(all_memories, dict):
    records = all_memories.get("results", [])
else:
    records = all_memories
memory_id = records[0]["id"]

# Get a single memory by ID.
specific_memory = m.get(memory_id)
print(specific_memory)

# Semantic search scoped to a single user.
related_memories = m.search(query="alice hobbies?", user_id="alice")
print(related_memories)

# Overwrite the stored memory's text.
result = m.update(memory_id=memory_id, data="Visited Brisbane in Winter")
print(result)

m.delete(memory_id=memory_id)  # Delete a memory
m.delete_all(user_id="alice")  # Delete all memories for the user

# Listing again should now show an empty store.
all_memories = m.get_all()
print(all_memories)
No comments:
Post a Comment