forked from MemTensor/MemOS
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathreranker.py
More file actions
189 lines (166 loc) · 6.77 KB
/
reranker.py
File metadata and controls
189 lines (166 loc) · 6.77 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
import os
import uuid
from dotenv import load_dotenv
from memos import log
from memos.configs.embedder import EmbedderConfigFactory
from memos.configs.reranker import RerankerConfigFactory
from memos.embedders.factory import EmbedderFactory
from memos.memories.textual.item import TextualMemoryItem, TreeNodeTextualMemoryMetadata
from memos.reranker.factory import RerankerFactory
load_dotenv()  # pull OPENAI_API_KEY / OPENAI_API_BASE / BGE_* settings from a local .env, if present
logger = log.get_logger(__name__)  # module-level logger (unused in this demo script, kept for convention)
def make_item(text: str) -> TextualMemoryItem:
    """Build a minimal TextualMemoryItem; embedding will be populated later."""
    demo_metadata = TreeNodeTextualMemoryMetadata(
        user_id=None,
        session_id=None,
        status="activated",
        type="fact",
        memory_time="2024-01-01",
        source="conversation",
        confidence=100.0,
        tags=[],
        visibility="public",
        updated_at="2025-01-01T00:00:00",
        memory_type="LongTermMemory",
        key="demo_key",
        sources=["demo://example"],
        embedding=[],  # left empty on purpose; real vectors are attached by the caller
        background="demo background...",
    )
    return TextualMemoryItem(
        id=str(uuid.uuid4()),
        memory=text,
        metadata=demo_metadata,
    )
def show_ranked(title: str, ranked: list[tuple[TextualMemoryItem, float]], top_n: int = 5) -> None:
    """Print up to ``top_n`` (item, score) pairs under a section header.

    Memory text longer than 80 characters is truncated with a trailing ellipsis.
    """
    print(f"\n=== {title} ===")
    position = 0
    for item, score in ranked[:top_n]:
        position += 1
        text = item.memory
        if len(text) > 80:
            preview = text[:80] + "..."
        else:
            preview = text
        print(f"[#{position}] score={score:.6f} | {preview}")
def main():
    """End-to-end reranker demo.

    Steps: build an embedder, embed a query plus five demo documents, then
    rerank with the local cosine backend and (optionally, if BGE_RERANKER_URL
    is set) with an HTTP BGE reranker service, including a search_filter run.
    Requires valid OPENAI_API_KEY (and optionally OPENAI_API_BASE) in the
    environment for the embedding calls.
    """
    # -------------------------------
    # 1) Build the embedder (real vectors)
    # You may need to set valid OPENAI_API_KEY and OPENAI_API_BASE in your environment variables.
    # -------------------------------
    embedder_cfg = EmbedderConfigFactory.model_validate(
        {
            "backend": "universal_api",
            "config": {
                "provider": "openai",  # or "azure"
                "api_key": os.getenv("OPENAI_API_KEY"),
                "model_name_or_path": "text-embedding-3-large",
                "base_url": os.getenv("OPENAI_API_BASE"),  # optional
            },
        }
    )
    """
    # -------------------------------
    # Optional: Build the embedder (using local sentence-transformers)
    # -------------------------------
    # Use a local model so no API key is required.
    embedder_cfg = EmbedderConfigFactory.model_validate(
        {
            "backend": "sentence_transformer",
            "config": {
                "model_name_or_path": "nomic-ai/nomic-embed-text-v1.5",
                "trust_remote_code": True,
            },
        }
    )
    """
    embedder = EmbedderFactory.from_config(embedder_cfg)
    # -------------------------------
    # 2) Prepare query + documents
    # -------------------------------
    query = "What is the capital of France?"
    items = [
        make_item("Paris is the capital of France."),
        make_item("Berlin is the capital of Germany."),
        make_item("The capital of Brazil is Brasilia."),
        make_item("Apples and bananas are common fruits."),
        make_item("The Eiffel Tower is a famous landmark in Paris."),
    ]
    # -------------------------------
    # 3) Embed query + docs with real embeddings
    # -------------------------------
    # One batched call: index 0 is the query, the rest map 1:1 onto items.
    texts_to_embed = [query] + [it.memory for it in items]
    vectors = embedder.embed(texts_to_embed)  # real vectors from your provider/model
    query_embedding = vectors[0]
    doc_embeddings = vectors[1:]
    # attach real embeddings back to items
    for it, emb in zip(items, doc_embeddings, strict=False):
        it.metadata.embedding = emb
    # Give each demo item distinct identity metadata so filter/boost scenarios below
    # have something to match against.
    items[0].metadata.user_id = "u_123"
    items[0].metadata.session_id = "s_abc"
    items[0].metadata.tags = [*items[0].metadata.tags, "paris"]
    items[1].metadata.user_id = "u_124"
    items[1].metadata.session_id = "s_xyz"
    items[1].metadata.tags = [*items[1].metadata.tags, "germany"]
    items[2].metadata.user_id = "u_125"
    items[2].metadata.session_id = "s_ss3"
    items[3].metadata.user_id = "u_126"
    items[3].metadata.session_id = "s_ss4"
    items[4].metadata.user_id = "u_127"
    items[4].metadata.session_id = "s_ss5"
    # -------------------------------
    # 4) Rerank with cosine_local (uses your real embeddings)
    # -------------------------------
    cosine_cfg = RerankerConfigFactory.model_validate(
        {
            "backend": "cosine_local",
            "config": {
                # structural boosts (optional): uses metadata.background
                "level_weights": {"topic": 1.0, "concept": 1.0, "fact": 1.0},
                "level_field": "background",
            },
        }
    )
    cosine_reranker = RerankerFactory.from_config(cosine_cfg)
    ranked_cosine = cosine_reranker.rerank(
        query=query,
        graph_results=items,
        top_k=10,
        query_embedding=query_embedding,  # required by cosine_local
    )
    show_ranked("CosineLocal Reranker (with real embeddings)", ranked_cosine, top_n=5)
    # -------------------------------
    # 5) (Optional) Rerank with HTTP BGE (OpenAI-style /query+documents)
    # Requires the service URL; no need for embeddings here
    # -------------------------------
    bge_url = os.getenv("BGE_RERANKER_URL")  # e.g., "http://xxx.x.xxxxx.xxx:xxxx/v1/rerank"
    if bge_url:
        http_cfg = RerankerConfigFactory.model_validate(
            {
                "backend": "http_bge",
                "config": {
                    "url": bge_url,
                    "model": os.getenv("BGE_RERANKER_MODEL", "bge-reranker-v2-m3"),
                    "timeout": int(os.getenv("BGE_RERANKER_TIMEOUT", "10")),
                    "boost_weights": {"user_id": 0.5, "tags": 0.2},
                },
            }
        )
        http_reranker = RerankerFactory.from_config(http_cfg)
        ranked_http = http_reranker.rerank(
            query=query,
            graph_results=items,  # uses item.memory internally as documents
            top_k=10,
        )
        show_ranked("HTTP BGE Reranker (OpenAI-style API)", ranked_http, top_n=5)
        # --- NEW: search_filter with rerank ---
        # Expected effect with boost_weights={"user_id": 0.5, "tags": 0.2}:
        # - "tags": "germany" matches items[1] → its score is boosted by the 0.2 tags weight
        #   (presumably score * 1.2 — confirm against the http_bge boost implementation)
        # - "session_id": "germany" matches no item (session ids are "s_*"), so no boost from it
        # - "project_id" is not a metadata field → presumably logged as a warning, score unchanged
        search_filter = {"session_id": "germany", "tags": "germany", "project_id": "demo-p1"}
        ranked_http_boosted = http_reranker.rerank(
            query=query,
            graph_results=items,
            top_k=10,
            search_filter=search_filter,
        )
        show_ranked("HTTP BGE Reranker (with search_filter boosts)", ranked_http_boosted, top_n=5)
    else:
        print("\n[Info] Skipped HTTP BGE scenario because BGE_RERANKER_URL is not set.")
# Run the demo only when executed as a script, not on import.
if __name__ == "__main__":
    main()