Closed
Changes from all commits
217 commits
65d5649
update reader and search strategy
Oct 28, 2025
6cad866
set strategy reader and search config
Oct 29, 2025
f040110
fix all reader conflicts
Oct 29, 2025
c389367
fix install problem
Oct 29, 2025
499502d
fix
Oct 29, 2025
e1bb223
fix test
Oct 29, 2025
72b7466
Merge branch 'dev' into dev_test
CaralHsi Oct 29, 2025
74585e8
Merge branch 'dev' into dev_test
fridayL Oct 30, 2025
790e99f
turn off graph recall
Oct 30, 2025
15b63a7
Merge branch 'dev' into dev_test
Oct 30, 2025
390ba29
turn off graph recall
Oct 30, 2025
9615282
turn off graph recall
Oct 30, 2025
2fb8ce0
Merge branch 'dev' into dev_test
fridayL Oct 30, 2025
6035522
Merge branch 'dev' into dev_test
Oct 30, 2025
04f412b
fix Searcher input bug
Oct 30, 2025
9716274
fix Searcher
Oct 30, 2025
c455a4e
Merge branch 'dev_test' of github.com:whipser030/MemOS into dev_test
Oct 30, 2025
f8b9b4a
fix Search
Oct 30, 2025
c840ad4
Merge branch 'dev' into dev_test
Oct 30, 2025
b9dbecd
fix bug
Nov 4, 2025
1798f60
Merge branch 'dev' of github.com:MemTensor/MemOS into dev
Nov 4, 2025
6db95e7
Merge branch 'dev' into dev_test
Nov 4, 2025
1173c07
adjust strategy reader
Nov 4, 2025
7ab465b
Merge branch 'dev' into dev_test
Nov 4, 2025
744d227
adjust strategy reader
Nov 4, 2025
a9a98fa
adjust search config input
Nov 4, 2025
900f5e6
reformat code
Nov 4, 2025
ac7aff5
Merge branch 'dev' into dev_test
CaralHsi Nov 4, 2025
144c446
re pr
Nov 5, 2025
a2b55c7
Merge branch 'dev' of github.com:MemTensor/MemOS into dev
Nov 5, 2025
441c52b
Merge branch 'dev' into dev_test
Nov 5, 2025
6f272db
Merge branch 'dev_test' of github.com:whipser030/MemOS into dev_test
Nov 5, 2025
f506d3e
format repair
Nov 5, 2025
db9041c
Merge branch 'dev' of github.com:MemTensor/MemOS into dev
Nov 5, 2025
d921284
Merge branch 'dev' into dev_test
CaralHsi Nov 5, 2025
d036c53
Merge branch 'dev' of github.com:MemTensor/MemOS into dev
Nov 11, 2025
5a3f0db
Merge branch 'dev' into dev_test
Nov 11, 2025
dc67413
fix time issue
Nov 11, 2025
7699b9a
Merge branch 'dev_test' of github.com:whipser030/MemOS into dev_test
Nov 11, 2025
8bfbf94
develop feedback process
Nov 19, 2025
875c551
Merge branch 'dev' of github.com:MemTensor/MemOS into dev
Nov 19, 2025
7f20f8b
Resolve merge conflicts
Nov 19, 2025
4d712eb
feedback handler configuration
Nov 20, 2025
36b93eb
Merge branch 'dev' of github.com:MemTensor/MemOS into dev
Nov 25, 2025
adec73e
merged
Nov 25, 2025
aef3aad
upgrade feedback using
Nov 26, 2025
81ec520
Merge branch 'dev' of github.com:MemTensor/MemOS into dev
Nov 26, 2025
55c9d89
fix
Nov 26, 2025
b4fbfde
Merge branch 'dev' of github.com:MemTensor/MemOS into dev
Nov 27, 2025
ee64719
Merge branch 'dev' into dev_test
Nov 27, 2025
0fa9be7
add threshold
Nov 27, 2025
4a4746e
Merge branch 'dev' of github.com:MemTensor/MemOS into dev
Nov 27, 2025
16de8da
Merge branch 'dev' into dev_test
Nov 27, 2025
facb7b3
update prompt
Nov 27, 2025
eab5fe6
update prompt
Nov 27, 2025
7577aac
fix handler
Nov 27, 2025
cc4069d
add feedback scheduler
Nov 29, 2025
2529db2
add handler change node update
Dec 1, 2025
898ccac
add handler change node update
Dec 1, 2025
faec340
Merge branch 'dev' of github.com:MemTensor/MemOS into dev
Dec 1, 2025
913c24d
add handler change node update
Dec 1, 2025
91d063d
add handler change node update
Dec 1, 2025
2a47880
add handler change node update
Dec 1, 2025
c5618c6
Merge branch 'dev' into dev_test
whipser030 Dec 2, 2025
b9737f1
Merge branch 'dev' into dev_test
CaralHsi Dec 2, 2025
ad9c2e7
fix interface input
Dec 2, 2025
c0c32b1
Merge branch 'dev_test' of github.com:whipser030/MemOS into dev_test
Dec 2, 2025
d906f0d
Merge branch 'dev' of github.com:MemTensor/MemOS into dev
Dec 2, 2025
696708e
fix interface input
Dec 2, 2025
6ad8dae
add chunk and ratio filter
Dec 3, 2025
6298c64
Merge branch 'dev' of github.com:MemTensor/MemOS into dev
Dec 3, 2025
47acd7a
Merge branch 'dev' into dev_test
Dec 3, 2025
0727c25
Merge branch 'dev' of github.com:MemTensor/MemOS into dev
Dec 3, 2025
0b0342d
Merge branch 'dev' into dev_test
Dec 3, 2025
294c1e6
Merge branch 'dev' of github.com:MemTensor/MemOS into dev
Dec 3, 2025
d9158e4
Merge branch 'dev' into dev_test
Dec 3, 2025
699cdf7
update stopwords
Dec 3, 2025
8ca03c0
Merge branch 'dev' into dev_test
fridayL Dec 3, 2025
6076935
Merge branch 'dev' of github.com:MemTensor/MemOS into dev
Dec 4, 2025
b2b0f6e
Merge branch 'dev' into dev_test
Dec 4, 2025
343eeb3
fix messages queue
Dec 4, 2025
1bb9396
Merge branch 'dev_test' of github.com:whipser030/MemOS into dev_test
Dec 4, 2025
045196c
Merge branch 'dev' of github.com:MemTensor/MemOS into dev
Dec 4, 2025
7131c35
Merge branch 'dev' into dev_test
Dec 4, 2025
d66e8ce
add seach_by_keywords_LIKE
Dec 7, 2025
d081aaa
Merge branch 'dev' of github.com:MemTensor/MemOS into dev
Dec 7, 2025
405658f
Merge branch 'dev' into dev_test
Dec 7, 2025
ae60994
add doc filter
Dec 9, 2025
70efbf3
Merge branch 'dev' of github.com:MemTensor/MemOS into dev
Dec 9, 2025
a613c7e
merge dev
Dec 9, 2025
7b0f2f4
add retrieve query
Dec 9, 2025
c6768b6
Merge branch 'dev' of github.com:MemTensor/MemOS into dev
Dec 9, 2025
005a5bb
add retrieve queies
Dec 10, 2025
d69e7f4
patch info filter
Dec 10, 2025
d4f18e8
Merge branch 'dev' of github.com:MemTensor/MemOS into dev
Dec 10, 2025
3c5199a
add strict info filter
Dec 11, 2025
365e0b6
Merge branch 'dev' of github.com:MemTensor/MemOS into dev
Dec 12, 2025
9bc942d
Merge branch 'dev' into dev_test
Dec 12, 2025
eab3d80
add log and make embedding safety net
Dec 12, 2025
9519f5e
Merge branch 'dev' of github.com:MemTensor/MemOS into dev
Dec 12, 2025
f21a885
Merge branch 'dev' into dev_test
Dec 12, 2025
7f146e1
add log and make embedding safety net
Dec 12, 2025
4cc4677
Merge branch 'dev' of github.com:MemTensor/MemOS into dev
Dec 15, 2025
c01c900
Merge branch 'dev' into dev_test
Dec 15, 2025
4da6d31
deduplicate add objects
Dec 16, 2025
28934c8
Merge branch 'dev' of github.com:MemTensor/MemOS into dev
Dec 16, 2025
d3f0a77
Merge branch 'dev' into dev_test
Dec 16, 2025
13e8d16
Merge branch 'dev' into dev_test
CaralHsi Dec 16, 2025
fd2816c
use _add_memories_parallel
Dec 17, 2025
dfe62dc
Merge branch 'dev' of github.com:MemTensor/MemOS into dev
Dec 17, 2025
192d150
Merge branch 'dev' into dev_test
Dec 17, 2025
e6ce0ee
Merge branch 'dev_test' of github.com:whipser030/MemOS into dev_test
Dec 17, 2025
02585f2
Merge branch 'dev' into dev_test
fridayL Dec 17, 2025
39b0b20
delete Special characters
Dec 17, 2025
81fa434
Merge branch 'dev' of github.com:MemTensor/MemOS into dev
Dec 17, 2025
0a10a52
Merge branch 'dev' into dev_test
Dec 17, 2025
991092e
Merge branch 'dev_test' of github.com:whipser030/MemOS into dev_test
Dec 17, 2025
b7b5003
delete Special characters
Dec 17, 2025
95bb061
delete Special characters
Dec 17, 2025
14d6732
Merge branch 'dev' of github.com:MemTensor/MemOS into dev
Dec 17, 2025
c8ea8ae
Merge branch 'dev' into dev_test
Dec 17, 2025
a2fe6ed
delete Special characters
Dec 17, 2025
6274864
add source_doc_id
Dec 17, 2025
5f19846
Merge branch 'dev' of github.com:MemTensor/MemOS into dev
Dec 17, 2025
a06e5f7
Merge branch 'dev' into dev_test
Dec 17, 2025
f2aec38
add source_doc_id
Dec 17, 2025
45f4957
Merge branch 'dev' of github.com:MemTensor/MemOS into dev
Dec 18, 2025
bd612b9
Merge branch 'dev' into dev_test
Dec 18, 2025
d34812b
add reranker in init com..
Dec 18, 2025
5b681be
Merge branch 'dev' into dev_test
fridayL Dec 18, 2025
3919dcf
fix circle import
Dec 18, 2025
56df680
Merge branch 'dev_test' of github.com:whipser030/MemOS into dev_test
Dec 18, 2025
dd1aef9
Merge branch 'dev' of github.com:MemTensor/MemOS into dev
Dec 18, 2025
777d6e0
Merge branch 'dev' into dev_test
Dec 18, 2025
353e417
Merge branch 'dev' of github.com:MemTensor/MemOS into dev
Dec 22, 2025
bd8651c
Merge branch 'dev' into dev_test
Dec 22, 2025
73106ed
add feedback judgement
Dec 23, 2025
b7ffa5a
Merge branch 'dev' of github.com:MemTensor/MemOS into dev
Dec 23, 2025
f37b15b
Merge branch 'dev' into dev_test
Dec 23, 2025
1b0e3af
add feedback judgement
Dec 23, 2025
f6d8f77
add pref feedback
Dec 24, 2025
9137781
Merge branch 'dev' of github.com:MemTensor/MemOS into dev
Dec 24, 2025
ef36b5b
add pref feedback
Dec 24, 2025
ed34f0c
add pref feedback
Dec 24, 2025
fd50e90
Merge branch 'dev' into dev_test
fridayL Dec 25, 2025
653d900
Merge branch 'dev' of github.com:MemTensor/MemOS into dev
Dec 29, 2025
827a503
patch: get_memory func filter user id and make page chunk
Dec 29, 2025
82ea9ee
Merge branch 'dev' into dev_test
Dec 29, 2025
8e28619
Merge branch 'dev_test' of github.com:whipser030/MemOS into dev_test
Dec 29, 2025
3dd09d5
Merge branch 'dev' of github.com:MemTensor/MemOS into dev
Dec 29, 2025
a779823
Merge branch 'dev' into dev_test
Dec 29, 2025
4fab261
add total num
Dec 30, 2025
29f63c0
Merge branch 'dev' of github.com:MemTensor/MemOS into dev
Dec 30, 2025
6aacaa7
Merge branch 'dev' into dev_test
Dec 30, 2025
b5f71a5
add total num
Dec 30, 2025
098a830
add milvus pagination
Dec 30, 2025
dd8cd80
Merge branch 'dev' of github.com:MemTensor/MemOS into dev
Dec 30, 2025
9eca446
Merge branch 'dev' into dev_test
Dec 30, 2025
3fd1743
fix merge implicit explicit pref
Dec 30, 2025
963aa91
Merge branch 'dev' of github.com:MemTensor/MemOS into dev
Dec 30, 2025
031876c
fix merge implicit explicit pref
Dec 30, 2025
af610fe
fix merge implicit explicit pref
Dec 30, 2025
eab2303
fix merge implicit explicit pref
Dec 30, 2025
83061d1
fix json load bug
Jan 7, 2026
6707cc2
Merge branch 'dev' of github.com:MemTensor/MemOS into dev
Jan 7, 2026
a0989fd
Merge branch 'dev' into dev_test
Jan 7, 2026
48d4ff1
knowledge raw_text replace memory
Jan 8, 2026
f5b7547
Merge branch 'dev' of github.com:MemTensor/MemOS into dev
Jan 8, 2026
10d8b93
Merge branch 'dev' into dev_test
Jan 8, 2026
21c12cd
knowledge raw_text replace memory
Jan 8, 2026
7164b71
knowledge raw_text replace memory
Jan 8, 2026
d6e5bfa
Merge branch 'dev' of github.com:MemTensor/MemOS into dev
Jan 12, 2026
4a195dc
Merge branch 'dev' into dev_test
Jan 12, 2026
47f2100
Merge branch 'dev-20260112-v2.0.2' of github.com:MemTensor/MemOS into…
Jan 12, 2026
adff60a
unuse rerank
Jan 12, 2026
a1179da
Merge branch 'dev-20260112-v2.0.2' of github.com:MemTensor/MemOS into…
Jan 12, 2026
33aab5e
Merge branch 'dev_release' into dev_test
Jan 12, 2026
1f795ab
backtrack knowledge retrieval
Jan 13, 2026
7b9941b
Merge branch 'dev-20260112-v2.0.2' of github.com:MemTensor/MemOS into…
Jan 13, 2026
e2ee965
Merge branch 'dev_release' into dev_test
Jan 13, 2026
a5e423f
norerank knowledge data
Jan 14, 2026
cdfea53
Merge branch 'dev-20260112-v2.0.2' of github.com:MemTensor/MemOS into…
Jan 14, 2026
4ff53e7
norerank knowledge data
Jan 14, 2026
a2e8781
use embedding rerank knowledge
Jan 14, 2026
4d9a86d
Merge branch 'dev-20260112-v2.0.2' of github.com:MemTensor/MemOS into…
Jan 14, 2026
f3e1b92
Merge branch 'dev_release' into dev_test
Jan 14, 2026
456f830
add chunk memories
Jan 15, 2026
cb34aaf
Merge branch 'dev-20260112-v2.0.2' of github.com:MemTensor/MemOS into…
Jan 15, 2026
f856060
feat: mix search upload file raw content
Jan 18, 2026
0883f22
Merge branch 'dev-20260112-v2.0.2' of github.com:MemTensor/MemOS into…
Jan 18, 2026
2e1edf2
feat: search upload raw file
Jan 18, 2026
444eba3
feedback soures set empty
Jan 18, 2026
976981d
chat upload file can be chunk
Jan 19, 2026
61ab9b5
Merge branch 'dev-20260119-v2.0.3' of github.com:MemTensor/MemOS into…
Jan 19, 2026
217465f
fix: when rerank query is too long
Jan 19, 2026
6bf4499
fix too long rerank
Jan 20, 2026
b5c286e
fix too long rerank
Jan 20, 2026
1666e40
Merge branch 'dev_release' into dev_test
Jan 20, 2026
4a8248f
make mem_reader config
Jan 20, 2026
3e6f036
make mem_reader config
Jan 20, 2026
e8b82c4
transmit search_memory_type param
Feb 2, 2026
ddcddea
refactor format
Feb 3, 2026
6507a8f
feat: Add edges to the knowledge base memory
Feb 4, 2026
f65a004
add file url to knowledge db
Feb 4, 2026
c90c73a
add log
Feb 4, 2026
7b28fe5
Merge branch 'dev-20260202-v2.0.5' into dev_test_0204
Feb 4, 2026
f177492
fix
Feb 4, 2026
f1a1f74
search neighbor_discovery
Feb 4, 2026
138add0
record fallback ratio
Feb 5, 2026
0a823e7
Merge branch 'dev-20260202-v2.0.5' into dev_test_0204
Feb 5, 2026
e55188d
feedback do not change memories with edges
Feb 5, 2026
631d658
Merge branch 'dev-20260202-v2.0.5' into dev_test_0204
Feb 5, 2026
a15e23d
add skill
Feb 5, 2026
0eabb67
fix test
Feb 5, 2026
f583bf9
Merge branch 'dev-20260202-v2.0.5' into dev_test_0204
Feb 6, 2026
1a846ee
Merge branch 'dev-20260202-v2.0.5' into dev_test_0204
CaralHsi Feb 6, 2026
c53a698
Merge branch 'dev-20260202-v2.0.5' into dev_test_0204
whipser030 Feb 6, 2026
8 changes: 8 additions & 0 deletions src/memos/api/config.py
@@ -538,6 +538,10 @@ def get_internet_config() -> dict[str, Any]:
"chunker": {
"backend": "sentence",
"config": {
"save_rawfile": os.getenv(
"MEM_READER_SAVE_RAWFILENODE", "true"
).lower()
== "true",
"tokenizer_or_token_counter": "gpt2",
"chunk_size": 512,
"chunk_overlap": 128,
@@ -804,6 +808,8 @@ def get_product_default_config() -> dict[str, Any]:
"chunker": {
"backend": "sentence",
"config": {
"save_rawfile": os.getenv("MEM_READER_SAVE_RAWFILENODE", "true").lower()
== "true",
"tokenizer_or_token_counter": "gpt2",
"chunk_size": 512,
"chunk_overlap": 128,
@@ -919,6 +925,8 @@ def create_user_config(user_name: str, user_id: str) -> tuple["MOSConfig", "Gene
"chunker": {
"backend": "sentence",
"config": {
"save_rawfile": os.getenv("MEM_READER_SAVE_RAWFILENODE", "true").lower()
== "true",
"tokenizer_or_token_counter": "gpt2",
"chunk_size": 512,
"chunk_overlap": 128,
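The three hunks above repeat the same env-to-bool idiom. As an aside for reviewers, here is a minimal sketch of that pattern using only what the diff shows; the `env_flag` helper itself is hypothetical and not part of the codebase:

```python
import os

def env_flag(name: str, default: str = "true") -> bool:
    """Hypothetical helper mirroring the diff's pattern: unset or any
    casing of 'true' yields True, every other value yields False."""
    return os.getenv(name, default).lower() == "true"

# Equivalent to the three "save_rawfile" entries added in this file.
save_rawfile = env_flag("MEM_READER_SAVE_RAWFILENODE", "true")
```

Note that the pattern treats any value other than "true" (for example "1" or "yes") as False, which is worth keeping in mind when setting the variable.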
10 changes: 5 additions & 5 deletions src/memos/api/handlers/formatters_handler.py
@@ -113,7 +113,7 @@ def post_process_textual_mem(
mem
for mem in text_formatted_mem
if mem["metadata"]["memory_type"]
in ["WorkingMemory", "LongTermMemory", "UserMemory", "OuterMemory"]
in ["WorkingMemory", "LongTermMemory", "UserMemory", "OuterMemory", "RawFileMemory"]
]
tool_mem = [
mem
@@ -157,12 +157,13 @@ def separate_knowledge_and_conversation_mem(memories: list[dict[str, Any]]):
for item in memories:
sources = item.get("metadata", {}).get("sources", [])
if (
len(sources) > 0
item["metadata"]["memory_type"] != "RawFileMemory"
and len(sources) > 0
and "type" in sources[0]
and sources[0]["type"] == "file"
and "content" in sources[0]
and sources[0]["content"] != ""
): # TODO change to memory_type
):
knowledge_mem.append(item)
else:
conversation_mem.append(item)
@@ -203,8 +204,7 @@ def rerank_knowledge_mem(
key=lambda item: item.get("metadata", {}).get("relativity", 0.0),
reverse=True,
)

# TODO revoke sources replace memory value
# replace memory value with source.content for LongTermMemory, WorkingMemory or UserMemory
for item in reranked_knowledge_mem:
item["memory"] = item["metadata"]["sources"][0]["content"]
item["metadata"]["sources"] = []
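For readability, the updated branching in separate_knowledge_and_conversation_mem boils down to the predicate below. This is a sketch assuming the dict-shaped memories this handler already works with; the function name is illustrative:

```python
from typing import Any

def is_knowledge_mem(item: dict[str, Any]) -> bool:
    """Route an item to the knowledge bucket only when it is not a
    RawFileMemory and its first source is a file with non-empty content;
    everything else falls back to the conversation bucket."""
    sources = item.get("metadata", {}).get("sources", [])
    return (
        item["metadata"]["memory_type"] != "RawFileMemory"
        and len(sources) > 0
        and sources[0].get("type") == "file"
        and sources[0].get("content", "") != ""
    )
```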
11 changes: 9 additions & 2 deletions src/memos/api/product_models.py
@@ -422,8 +422,8 @@ class APISearchRequest(BaseRequest):
)
# Internal field for search memory type
search_memory_type: str = Field(
"All",
description="Type of memory to search: All, WorkingMemory, LongTermMemory, UserMemory, OuterMemory, ToolSchemaMemory, ToolTrajectoryMemory, SkillMemory",
"AllSummaryMemory",
description="Type of memory to search: All, WorkingMemory, LongTermMemory, UserMemory, OuterMemory, ToolSchemaMemory, ToolTrajectoryMemory, RawFileMemory, AllSummaryMemory, SkillMemory",
)

# ==== Context ====
@@ -461,6 +461,13 @@ class APISearchRequest(BaseRequest):
description="Source of the search query [plugin will router diff search]",
)

neighbor_discovery: bool = Field(
False,
description="Whether to enable neighbor discovery. "
"If enabled, the system will automatically recall neighbor chunks "
"relevant to the query. Default: False.",
)

@model_validator(mode="after")
def _convert_deprecated_fields(self) -> "APISearchRequest":
"""
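For context, a hypothetical request fragment showing only the two fields this diff touches; every other APISearchRequest field is omitted and the values are illustrative:

```python
# Only the fields changed in this diff; the rest of the APISearchRequest
# payload (query, user identifiers, etc.) is omitted here.
search_request_fragment = {
    "search_memory_type": "AllSummaryMemory",  # new default per the diff
    "neighbor_discovery": True,                # opt in to neighbor-chunk recall
}
```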
1 change: 1 addition & 0 deletions src/memos/configs/chunker.py
@@ -14,6 +14,7 @@ class BaseChunkerConfig(BaseConfig):
chunk_size: int = Field(default=512, description="Maximum tokens per chunk")
chunk_overlap: int = Field(default=128, description="Overlap between chunks")
min_sentences_per_chunk: int = Field(default=1, description="Minimum sentences in each chunk")
save_rawfile: bool = Field(default=True, description="Whether to save rawfile") # TODO


class SentenceChunkerConfig(BaseChunkerConfig):
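The new field is fed from the chunker blocks shown earlier in src/memos/api/config.py. A sketch of the shape that reaches the sentence chunker config; keys are taken from the diff, values are illustrative:

```python
chunker_config = {
    "backend": "sentence",
    "config": {
        "save_rawfile": True,  # driven by MEM_READER_SAVE_RAWFILENODE
        "tokenizer_or_token_counter": "gpt2",
        "chunk_size": 512,
        "chunk_overlap": 128,
        "min_sentences_per_chunk": 1,
    },
}
```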
51 changes: 38 additions & 13 deletions src/memos/mem_feedback/feedback.py
@@ -235,20 +235,16 @@ def _single_add_operation(
to_add_memory.metadata.tags = new_memory_item.metadata.tags
to_add_memory.memory = new_memory_item.memory
to_add_memory.metadata.embedding = new_memory_item.metadata.embedding

to_add_memory.metadata.user_id = new_memory_item.metadata.user_id
to_add_memory.metadata.created_at = to_add_memory.metadata.updated_at = (
datetime.now().isoformat()
)
to_add_memory.metadata.background = new_memory_item.metadata.background
else:
to_add_memory = new_memory_item.model_copy(deep=True)
to_add_memory.metadata.created_at = to_add_memory.metadata.updated_at = (
datetime.now().isoformat()
)
to_add_memory.metadata.background = new_memory_item.metadata.background

to_add_memory.id = ""
to_add_memory.metadata.created_at = to_add_memory.metadata.updated_at = (
datetime.now().isoformat()
)
to_add_memory.metadata.background = new_memory_item.metadata.background
to_add_memory.metadata.sources = []

added_ids = self._retry_db_operation(
lambda: self.memory_manager.add([to_add_memory], user_name=user_name, use_batch=False)
)
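The net effect of this hunk is that both branches now share one tail that resets identity before re-adding. A sketch of that shared tail; the helper name and standalone form are illustrative, and the diff additionally copies metadata.background from the incoming item:

```python
from datetime import datetime

def reset_for_reinsert(to_add_memory):
    """Shared tail of both branches: clear the id and sources and stamp fresh
    created_at/updated_at before handing the item to the memory manager."""
    now = datetime.now().isoformat()
    to_add_memory.id = ""
    to_add_memory.metadata.created_at = now
    to_add_memory.metadata.updated_at = now
    to_add_memory.metadata.sources = []
    return to_add_memory
```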
@@ -626,10 +622,39 @@ def _info_comparison(self, memory: TextualMemoryItem, _info: dict, include_keys:

def _retrieve(self, query: str, info=None, top_k=20, user_name=None):
"""Retrieve memory items"""
retrieved_mems = self.searcher.search(
query, info=info, user_name=user_name, top_k=top_k, full_recall=True

def check_has_edges(mem_item: TextualMemoryItem) -> tuple[TextualMemoryItem, bool]:
"""Check if a memory item has edges."""
edges = self.searcher.graph_store.get_edges(mem_item.id, user_name=user_name)
return (mem_item, len(edges) == 0)

text_mems = self.searcher.search(
query,
info=info,
memory_type="AllSummaryMemory",
user_name=user_name,
top_k=top_k,
full_recall=True,
)
retrieved_mems = [item[0] for item in retrieved_mems if float(item[1]) > 0.01]
text_mems = [item[0] for item in text_mems if float(item[1]) > 0.01]

# Memory with edges is not modified by feedback
retrieved_mems = []
with ContextThreadPoolExecutor(max_workers=10) as executor:
futures = {executor.submit(check_has_edges, item): item for item in text_mems}
for future in concurrent.futures.as_completed(futures):
try:
mem_item, has_no_edges = future.result()
if has_no_edges:
retrieved_mems.append(mem_item)
except Exception as e:
logger.error(f"[0107 Feedback Core: _retrieve] Error checking edges: {e}")

if len(retrieved_mems) < len(text_mems):
logger.info(
f"[0107 Feedback Core: _retrieve] {len(text_mems) - len(retrieved_mems)} "
f"text memories are not modified by feedback due to edges."
)

if self.pref_feedback:
pref_info = {}
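The concurrency pattern added to _retrieve is easier to see in isolation. Below is a sketch using the stdlib ThreadPoolExecutor in place of the project's ContextThreadPoolExecutor; graph_store.get_edges is used as in the diff, while the function and variable names are illustrative:

```python
from concurrent.futures import ThreadPoolExecutor, as_completed

def drop_items_with_edges(items, graph_store, user_name=None, max_workers=10):
    """Keep only memories with no graph edges so feedback never rewrites
    nodes that are still linked to other memories."""
    def edge_free(item):
        return item, len(graph_store.get_edges(item.id, user_name=user_name)) == 0

    kept = []
    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        futures = [executor.submit(edge_free, item) for item in items]
        for future in as_completed(futures):
            try:
                item, is_free = future.result()
                if is_free:
                    kept.append(item)
            except Exception:
                # The diff logs and skips failed edge checks; here we simply skip.
                continue
    return kept
```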
112 changes: 107 additions & 5 deletions src/memos/mem_reader/multi_modal_struct.py
@@ -288,6 +288,7 @@ def _build_window_from_items(
# Collect all memory texts and sources
memory_texts = []
all_sources = []
seen_content = set() # Track seen source content to avoid duplicates
roles = set()
aggregated_file_ids: list[str] = []

@@ -301,8 +302,18 @@
item_sources = [item_sources]

for source in item_sources:
# Add source to all_sources
all_sources.append(source)
# Get content from source for deduplication
source_content = None
if isinstance(source, dict):
source_content = source.get("content", "")
else:
source_content = getattr(source, "content", "") or ""

# Only add sources with new, non-empty content (empty-content sources are skipped)
content_key = source_content if source_content else None
if content_key and content_key not in seen_content:
seen_content.add(content_key)
all_sources.append(source)

# Extract role from source
if hasattr(source, "role") and source.role:
@@ -464,7 +475,10 @@ def _determine_prompt_type(self, sources: list) -> str:
source_role = source.get("role")
if source_role in {"user", "assistant", "system", "tool"}:
prompt_type = "chat"

if hasattr(source, "type"):
source_type = source.type
if source_type == "file":
prompt_type = "doc"
return prompt_type

def _get_maybe_merged_memory(
Expand Down Expand Up @@ -641,11 +655,14 @@ def _process_string_fine(
) -> list[TextualMemoryItem]:
"""
Process fast mode memory items through LLM to generate fine mode memories.
Where fast_memory_items are raw chunk memory items, not the final memory items.
"""
if not fast_memory_items:
return []

def _process_one_item(fast_item: TextualMemoryItem) -> list[TextualMemoryItem]:
def _process_one_item(
fast_item: TextualMemoryItem, chunk_idx: int, total_chunks: int
) -> list[TextualMemoryItem]:
"""Process a single fast memory item and return a list of fine items."""
fine_items: list[TextualMemoryItem] = []

@@ -749,12 +766,40 @@ def _process_one_item(fast_item: TextualMemoryItem) -> list[TextualMemoryItem]:
except Exception as e:
logger.error(f"[MultiModalFine] parse error: {e}")

# save rawfile node
if self.save_rawfile and prompt_type == "doc" and len(fine_items) > 0:
rawfile_chunk = mem_str
file_info = fine_items[0].metadata.sources[0].file_info
source = self.multi_modal_parser.file_content_parser.create_source(
message={"file": file_info},
info=info_per_item,
chunk_index=chunk_idx,
chunk_total=total_chunks,
chunk_content="",
)
rawfile_node = self._make_memory_item(
value=rawfile_chunk,
info=info_per_item,
memory_type="RawFileMemory",
tags=[
"mode:fine",
"multimodal:file",
f"chunk:{chunk_idx + 1}/{total_chunks}",
],
sources=[source],
)
rawfile_node.metadata.summary_ids = [mem_node.id for mem_node in fine_items]
fine_items.append(rawfile_node)
return fine_items

fine_memory_items: list[TextualMemoryItem] = []
total_chunks_len = len(fast_memory_items)

with ContextThreadPoolExecutor(max_workers=30) as executor:
futures = [executor.submit(_process_one_item, item) for item in fast_memory_items]
futures = [
executor.submit(_process_one_item, item, idx, total_chunks_len)
for idx, item in enumerate(fast_memory_items)
]

for future in concurrent.futures.as_completed(futures):
try:
@@ -764,6 +809,63 @@ def _process_one_item(fast_item: TextualMemoryItem) -> list[TextualMemoryItem]:
except Exception as e:
logger.error(f"[MultiModalFine] worker error: {e}")

# relate preceding and following RawFileMemory nodes
fine_memory_items = self._relate_preceding_following_rawfile_memories(fine_memory_items)
return fine_memory_items

def _relate_preceding_following_rawfile_memories(
self, fine_memory_items: list[TextualMemoryItem]
) -> list[TextualMemoryItem]:
"""
Relate RawFileMemory items to each other by setting preceding_id and following_id.
"""
# Filter RawFileMemory items and track their original positions
rawfile_items_with_pos = []
for idx, item in enumerate(fine_memory_items):
if (
hasattr(item.metadata, "memory_type")
and item.metadata.memory_type == "RawFileMemory"
):
rawfile_items_with_pos.append((idx, item))

if len(rawfile_items_with_pos) <= 1:
return fine_memory_items

def get_chunk_idx(item_with_pos) -> int:
"""Extract chunk_idx from item's source metadata."""
_, item = item_with_pos
if item.metadata.sources and len(item.metadata.sources) > 0:
source = item.metadata.sources[0]
# Handle both SourceMessage object and dict
if isinstance(source, dict):
file_info = source.get("file_info")
if file_info and isinstance(file_info, dict):
chunk_idx = file_info.get("chunk_index")
if chunk_idx is not None:
return chunk_idx
else:
# SourceMessage object
file_info = getattr(source, "file_info", None)
if file_info and isinstance(file_info, dict):
chunk_idx = file_info.get("chunk_index")
if chunk_idx is not None:
return chunk_idx
return float("inf")

# Sort items by chunk_index
sorted_rawfile_items_with_pos = sorted(rawfile_items_with_pos, key=get_chunk_idx)

# Relate adjacent items
for i in range(len(sorted_rawfile_items_with_pos) - 1):
_, current_item = sorted_rawfile_items_with_pos[i]
_, next_item = sorted_rawfile_items_with_pos[i + 1]
current_item.metadata.following_id = next_item.id
next_item.metadata.preceding_id = current_item.id

# Replace sorted items back to original positions in fine_memory_items
for orig_idx, item in sorted_rawfile_items_with_pos:
fine_memory_items[orig_idx] = item

return fine_memory_items

def _get_llm_tool_trajectory_response(self, mem_str: str) -> dict:
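The chunk-linking logic at the end of this file is essentially a doubly linked list over RawFileMemory nodes ordered by chunk_index. A reduced sketch follows; it assumes items expose id, metadata.sources, metadata.preceding_id and metadata.following_id as in the diff, and it omits the step that writes items back to their original positions:

```python
def link_chunks_in_order(rawfile_items):
    """Sort RawFileMemory items by their source chunk_index and wire up
    preceding_id/following_id between neighbours, forming a doubly linked list."""
    def chunk_index(item):
        sources = item.metadata.sources or []
        if not sources:
            return float("inf")
        source = sources[0]
        if isinstance(source, dict):
            file_info = source.get("file_info") or {}
        else:
            file_info = getattr(source, "file_info", None) or {}
        idx = file_info.get("chunk_index") if isinstance(file_info, dict) else None
        return idx if idx is not None else float("inf")  # unknown positions sort last

    ordered = sorted(rawfile_items, key=chunk_index)
    for current, nxt in zip(ordered, ordered[1:]):
        current.metadata.following_id = nxt.id
        nxt.metadata.preceding_id = current.id
    return ordered
```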