From 81ee252285edd92c500a0e30d6750104e854cb31 Mon Sep 17 00:00:00 2001 From: Mayank Solanki Date: Sun, 20 Oct 2024 17:53:20 +0530 Subject: [PATCH] removed client history --- .../docs/examples/memory/Mem0ChatEngine.ipynb | 217 +++++++++++++++++- .../examples/memory/Mem0FunctionalAgent.ipynb | 60 ++--- .../llama_index/memory/mem0/__init__.py | 4 +- .../llama_index/memory/mem0/base.py | 96 ++++---- 4 files changed, 290 insertions(+), 87 deletions(-) diff --git a/docs/docs/examples/memory/Mem0ChatEngine.ipynb b/docs/docs/examples/memory/Mem0ChatEngine.ipynb index c9fef165d8d6e..38bdb52b3bfcd 100644 --- a/docs/docs/examples/memory/Mem0ChatEngine.ipynb +++ b/docs/docs/examples/memory/Mem0ChatEngine.ipynb @@ -23,13 +23,13 @@ "outputs": [], "source": [ "# Initializing memory\n", - "from llama_index.memory.mem0 import Mem0Composable\n", + "from llama_index.memory.mem0 import Mem0ComposableMemory\n", "context_dict = {\n", - " \"user_id\": \"mayank_10\"\n", + " \"user_id\": \"david_1\"\n", "}\n", - "memory = Mem0Composable.from_client(\n", + "memory = Mem0ComposableMemory.from_client(\n", " context_dict=context_dict,\n", - " api_key=\"\"\n", + " api_key=\"\"\n", ")" ] }, @@ -41,7 +41,7 @@ "source": [ "# Initialize llm\n", "import os\n", - "os.environ[\"OPENAI_API_KEY\"] = \"\"\n", + "os.environ[\"OPENAI_API_KEY\"] = \"\"\n", "from llama_index.llms.openai import OpenAI\n", "llm = OpenAI(model=\"gpt-4o\")" ] @@ -69,7 +69,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "Hello Mayank! How can I assist you today?\n" + "Hello, Mayank! How can I assist you today?\n" ] } ], @@ -87,7 +87,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "That sounds exciting! San Francisco has a lot to offer. Do you have any specific places or activities in mind that you’d like to explore while you’re there?\n" + "That sounds exciting! San Francisco has a lot to offer. Do you have any specific places or activities in mind that you’d like to explore, or would you like some recommendations?\n" ] } ], @@ -105,7 +105,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "When scheduling a meeting in San Francisco, it's important to consider factors like traffic and popular tourist times. Generally, late morning or early afternoon, around 11:00 AM to 2:00 PM, can be a good time to avoid rush hour and have a productive meeting. However, it ultimately depends on your itinerary and preferences. Let me know if you need more specific advice!\n" + "When scheduling a meeting in San Francisco, it's important to consider the city's traffic patterns and your itinerary. Generally, late morning or early afternoon, around 11:00 AM to 2:00 PM, can be a good time to avoid rush hour traffic and have a productive meeting. However, if you have specific plans or commitments, it might be best to schedule around those. Let me know if you need more help!\n" ] } ], @@ -155,7 +155,7 @@ "}\n", "\n", "# Initialize memory\n", - "memory = Mem0Composable.from_config(\n", + "memory = Mem0ComposableMemory.from_config(\n", " confif_dict=config,\n", " context_dict=context_dict\n", ")" @@ -200,7 +200,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "That sounds exciting! San Francisco has a lot to offer. Do you have any specific places or activities in mind that you’d like to explore while you’re there?\n" + "That sounds exciting! San Francisco has a lot to offer. 
Do you have any specific places or activities in mind that you want to explore, or would you like some recommendations?\n" ] } ], @@ -218,7 +218,16 @@ "name": "stdout", "output_type": "stream", "text": [ - "Since you mentioned you're planning to visit San Francisco, you might consider checking out some popular spots like the Golden Gate Bridge, Alcatraz Island, Fisherman's Wharf, or exploring neighborhoods like Chinatown and Haight-Ashbury. If you have specific interests, let me know, and I can suggest more tailored activities!\n" + "Since you mentioned you're planning to visit San Francisco, here are a few popular places you might consider exploring:\n", + "\n", + "1. **Golden Gate Bridge** - A must-see iconic landmark.\n", + "2. **Alcatraz Island** - Take a ferry to explore the historic prison.\n", + "3. **Fisherman's Wharf** - Great for seafood and waterfront views.\n", + "4. **Chinatown** - Experience the vibrant culture and cuisine.\n", + "5. **Golden Gate Park** - Perfect for a leisurely stroll or a visit to the museums.\n", + "6. **Lombard Street** - Known for its steep, winding road.\n", + "\n", + "Let me know if you need more information or have other interests!\n" ] } ], @@ -226,6 +235,192 @@ "response = agent.chat(\"Where I will be visiting tommorow?\")\n", "print(response)" ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Tests" + ] + }, + { + "cell_type": "code", + "execution_count": 44, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[{'id': '63012c6b-9a5e-4e89-a025-680752c9e649', 'data': {'old_memory': 'Likes to eat pizza in snacks over burger', 'new_memory': 'Likes to eat chowmein in snacks over pizza'}, 'event': 'UPDATE'}, {'id': '1aeb2b94-5254-489e-ae1c-71ca7e98fa02', 'data': {'memory': 'Likes to eat burger in snacks'}, 'event': 'ADD'}]\n" + ] + } + ], + "source": [ + "from mem0 import MemoryClient\n", + "client = MemoryClient(api_key=\"m0-CyWinV6RbPWGgnMVp3Dn7RlrolmRdZXvQvJIox6R\")\n", + "\n", + "# Add some message to create history\n", + "messages = [{\"role\": \"user\", \"content\": \"Hi, My name is mayank\"}]\n", + "\n", + "# # Add second message to update history\n", + "messages.append({\"role\": \"assistant\", \"content\": \"Hi, Mayank nice to meet you.\"})\n", + "messages.append({\"role\": \"user\", \"content\": \"I like to Burger in snacks.\"})\n", + "messages.append({\"role\": \"assistant\", \"content\": \"Okay, noted your prefrences.\"})\n", + "messages.append({\"role\": \"user\", \"content\": \"I like to eat pizza in snacks over burger.\"})\n", + "messages.append({'role': 'user', 'content': 'I like to eat chowmein in snacks over pizza'})\n", + "\n", + "res = client.add(messages, user_id=\"raghu_2\")\n", + "print(res)\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 53, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[{'id': '9f75e83e-8b38-49c5-84dc-bf18db377aaa', 'data': {'memory': 'Leaving for Vietnam tomorrow'}, 'event': 'ADD'}]\n" + ] + } + ], + "source": [ + "messages2 = [{\"role\": \"assistant\", \"content\": \"Hi Raghu, You can visit taj mahal in India. 
Also you can go to Delhi\"}]\n", + "res = client.add(\"I have to leave to Vietnam tommorow\", user_id=\"raghu_2\")\n", + "print(res)" + ] + }, + { + "cell_type": "code", + "execution_count": 43, + "metadata": {}, + "outputs": [], + "source": [ + "# \n", + "# [{'id': 'fdb9d075-6e25-4a57-b993-adabdefd9041', 'data': {'memory': 'Name is Mayank'}, 'event': 'ADD'}, {'id': '63012c6b-9a5e-4e89-a025-680752c9e649', 'data': {'memory': 'Likes burgers as snacks'}, 'event': 'ADD'}]\n", + "# [{'id': '63012c6b-9a5e-4e89-a025-680752c9e649', 'data': {'old_memory': 'Likes burgers as snacks', 'new_memory': 'Likes to eat pizza in snacks over burger'}, 'event': 'UPDATE'}]\n", + "[\n", + " {'id': 'e5af43ba-d396-4186-9ba5-12ab8918b262', \n", + " 'memory_id': '63012c6b-9a5e-4e89-a025-680752c9e649', \n", + " 'input': [\n", + " {'role': 'user', 'content': 'Hi, My name is mayank'}, \n", + " {'role': 'assistant', 'content': 'Hi, Mayank nice to meet you.'}, \n", + " {'role': 'user', 'content': 'I like to Burger in snacks.'}, \n", + " {'role': 'assistant', 'content': 'Okay, noted your prefrences.'},\n", + " {'role': 'assistant', 'content': 'I like to eat chowmein in snacks over pizza'}],\n", + " \n", + " 'old_memory': None, \n", + " 'new_memory': 'Likes burgers as snacks', \n", + " 'user_id': 'raghu_2', \n", + " 'categories': ['food'], \n", + " 'event': 'ADD', 'metadata': None, 'created_at': '2024-10-20T02:43:39.908162-07:00', 'updated_at': '2024-10-20T02:43:39.893717-07:00'}, {'id': 'f20f07b6-0dfc-4396-95a0-1dd5eee82fea', 'memory_id': '63012c6b-9a5e-4e89-a025-680752c9e649', 'input': [{'role': 'user', 'content': 'Hi, My name is mayank'}, {'role': 'assistant', 'content': 'Hi, Mayank nice to meet you.'}, {'role': 'user', 'content': 'I like to Burger in snacks.'}, {'role': 'assistant', 'content': 'Okay, noted your prefrences.'}], 'old_memory': 'Likes burgers as snacks', 'new_memory': 'Likes to eat pizza in snacks over burger', 'user_id': 'raghu_2', 'categories': ['food'], 'event': 'UPDATE', 'metadata': None, 'created_at': '2024-10-20T02:46:32.650687-07:00', 'updated_at': '2024-10-20T02:46:32.637414-07:00'}]\n", + "\n", + "# Get history of how memory changed over time\n", + "memory_id = \"63012c6b-9a5e-4e89-a025-680752c9e649\"\n", + "history = client.history(memory_id)" + ] + }, + { + "cell_type": "code", + "execution_count": 42, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[{'id': 'e5af43ba-d396-4186-9ba5-12ab8918b262', 'memory_id': '63012c6b-9a5e-4e89-a025-680752c9e649', 'input': [{'role': 'user', 'content': 'Hi, My name is mayank'}, {'role': 'assistant', 'content': 'Hi, Mayank nice to meet you.'}, {'role': 'user', 'content': 'I like to Burger in snacks.'}, {'role': 'assistant', 'content': 'Okay, noted your prefrences.'}], 'old_memory': None, 'new_memory': 'Likes burgers as snacks', 'user_id': 'raghu_2', 'categories': ['food'], 'event': 'ADD', 'metadata': None, 'created_at': '2024-10-20T02:43:39.908162-07:00', 'updated_at': '2024-10-20T02:43:39.893717-07:00'}, {'id': 'f20f07b6-0dfc-4396-95a0-1dd5eee82fea', 'memory_id': '63012c6b-9a5e-4e89-a025-680752c9e649', 'input': [{'role': 'user', 'content': 'Hi, My name is mayank'}, {'role': 'assistant', 'content': 'Hi, Mayank nice to meet you.'}, {'role': 'user', 'content': 'I like to Burger in snacks.'}, {'role': 'assistant', 'content': 'Okay, noted your prefrences.'}], 'old_memory': 'Likes burgers as snacks', 'new_memory': 'Likes to eat pizza in snacks over burger', 'user_id': 'raghu_2', 'categories': ['food'], 'event': 
'UPDATE', 'metadata': None, 'created_at': '2024-10-20T02:46:32.650687-07:00', 'updated_at': '2024-10-20T02:46:32.637414-07:00'}]\n" + ] + } + ], + "source": [ + "print(history)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "[{'id': '3265a55c-cee5-45b2-987e-679b8e177771', 'memory_id': '0c073fdf-a417-4c18-9f8a-3ea2a5d50e8f', 'input': [{'role': 'user', 'content': 'Hi, My name is Mayank.'}], 'old_memory': None, 'new_memory': 'Name is Mayank', 'user_id': 'mayank_21', 'categories': ['personal_details'], 'event': 'ADD', 'metadata': None, 'created_at': '2024-10-20T02:08:07.334859-07:00', 'updated_at': '2024-10-20T02:08:07.326438-07:00'}]\n", + "[{'id': '990abd8e-0bad-40be-9e06-50b55bc30aec', 'memory_id': '919c84e2-f54c-4a38-a905-973eb90b6c6d', 'input': [{'role': 'user', 'content': 'My name is Raghu.'}], 'old_memory': None, 'new_memory': 'Name is Raghu', 'user_id': 'mayank_21', 'categories': ['personal_details'], 'event': 'ADD', 'metadata': None, 'created_at': '2024-10-20T02:10:29.136596-07:00', 'updated_at': '2024-10-20T02:10:29.128864-07:00'}]\n", + "[{'id': '1bc43d66-ff5d-4eef-9dbe-58b98539304c', 'data': {'memory': 'Likes to eat ice cream'}, 'event': 'ADD'}]\n", + "[{'id': '181e2e16-c841-4c7b-a77e-8d062adc1391', 'memory_id': '1bc43d66-ff5d-4eef-9dbe-58b98539304c', 'input': [{'role': 'user', 'content': 'I like to eat ice cream'}], 'old_memory': None, 'new_memory': 'Likes to eat ice cream', 'user_id': 'mayank_21', 'categories': ['food'], 'event': 'ADD', 'metadata': None, 'created_at': '2024-10-20T02:13:05.850445-07:00', 'updated_at': '2024-10-20T02:13:05.843199-07:00'}]\n", + "[{'id': '1bc43d66-ff5d-4eef-9dbe-58b98539304c', 'data': {'memory': 'Likes to eat ice cream'}, 'event': 'DELETE'}]\n", + "#1038102d-4fcf-41d5-833a-d21bec50680d\n", + "#fba767e3-ac98-4541-9187-fc7dc7b07a27" + ] + }, + { + "cell_type": "code", + "execution_count": 34, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[{'id': '41f66e2c-5da7-404f-bd67-b05996962e7a', 'memory': 'Likes ice cream', 'user_id': 'raghu_1', 'hash': '665c416431c628e75f708cf71ced25ab', 'metadata': None, 'categories': ['food'], 'created_at': '2024-10-20T02:36:01.701954-07:00', 'updated_at': '2024-10-20T02:36:01.701974-07:00', 'score': 0.33866615160300395}, {'id': 'adb12a05-8c10-44c9-a035-258d65ca706c', 'memory': 'Does not like ice cream', 'user_id': 'raghu_1', 'hash': 'f03f2523a65c1319e3b629944d622ae7', 'metadata': None, 'categories': ['user_preferences', 'food'], 'created_at': '2024-10-20T02:21:50.007369-07:00', 'updated_at': '2024-10-20T02:21:50.007387-07:00', 'score': 0.3099606797040534}, {'id': 'b86a079f-31ba-448e-be81-3524bc8ef39c', 'memory': 'Name is Raghu', 'user_id': 'raghu_1', 'hash': 'da0d42ceaaab2f5263b7be1611fd5e88', 'metadata': None, 'categories': ['personal_details'], 'created_at': '2024-10-20T02:21:49.973540-07:00', 'updated_at': '2024-10-20T02:21:49.973559-07:00', 'score': 0.30741360320096134}]\n" + ] + } + ], + "source": [ + "res = client.search(query='What do I have in desert?',user_id='raghu_1')\n", + "print(res)" + ] + }, + { + "cell_type": "code", + "execution_count": 62, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "3\n", + "4\n" + ] + } + ], + "source": [ + "a = [1,2,3,4]\n", + "\n", + "for i in a[2:]:\n", + " print(i)" + ] + }, + { + "cell_type": "code", + "execution_count": 64, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "4" + ] + }, + 
"execution_count": 65, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "a[-1]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": { diff --git a/docs/docs/examples/memory/Mem0FunctionalAgent.ipynb b/docs/docs/examples/memory/Mem0FunctionalAgent.ipynb index 5fb6aaa05addd..b243a41fddcda 100644 --- a/docs/docs/examples/memory/Mem0FunctionalAgent.ipynb +++ b/docs/docs/examples/memory/Mem0FunctionalAgent.ipynb @@ -34,15 +34,15 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 3, "metadata": {}, "outputs": [], "source": [ - "from llama_index.memory.mem0 import Mem0Composable\n", + "from llama_index.memory.mem0 import Mem0ComposableMemory\n", "context_dict = {\n", - " \"user_id\": \"mayank_11\"\n", + " \"user_id\": \"david_2\"\n", "}\n", - "memory=Mem0Composable.from_client(\n", + "memory=Mem0ComposableMemory.from_client(\n", " chat_history=ChatMemoryBuffer.from_defaults(),\n", " context_dict=context_dict,\n", " api_key=\"\"\n", @@ -51,7 +51,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 4, "metadata": {}, "outputs": [], "source": [ @@ -64,7 +64,7 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 5, "metadata": {}, "outputs": [], "source": [ @@ -89,7 +89,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 6, "metadata": {}, "outputs": [], "source": [ @@ -103,17 +103,17 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 7, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "> Running step 708deb9e-029b-4720-a3e7-377d49c1d8b8. Step input: Hi, My name is Mayank.\n", + "> Running step 720c2cbe-9e70-4f88-9d28-09fe4064b6fb. Step input: Hi, My name is Mayank.\n", "Added user message to memory: Hi, My name is Mayank.\n", "=== LLM Response ===\n", - "Hello, Mayank! How can I assist you today?\n" + "Hello Mayank! How can I assist you today?\n" ] } ], @@ -123,17 +123,17 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 8, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "> Running step 37d4c3db-1856-4d12-b365-20079e133b27. Step input: My preferred way of communication would be Email.\n", + "> Running step c99b8ee5-af6b-4e88-b977-9bedbbd04d73. Step input: My preferred way of communication would be Email.\n", "Added user message to memory: My preferred way of communication would be Email.\n", "=== LLM Response ===\n", - "Got it, Mayank! I'll use email as your preferred method of communication. If you need anything specific, just let me know!\n" + "Got it, Mayank! I'll make sure to use email as your preferred method of communication. If there's anything specific you need, just let me know!\n" ] } ], @@ -143,21 +143,21 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 9, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "> Running step 1a9281ba-5041-4a76-8622-e98072cbf371. Step input: Send me an update of your product.\n", + "> Running step 4806bdb7-1123-4774-b7e1-a445d5fb9681. Step input: Send me an update of your product.\n", "Added user message to memory: Send me an update of your product.\n", "=== Calling Function ===\n", "Calling function: email_fn with args: {\"name\": \"Mayank\"}\n", "Emailing... Mayank\n", "=== Function Output ===\n", "None\n", - "> Running step 3af3dcad-88cc-4f8e-9f87-838503261563. 
Step input: None\n", + "> Running step d060a412-006f-4d56-968a-c64e6b902590. Step input: None\n", "=== LLM Response ===\n", "I've sent you an update of our product via email. If you have any questions or need further information, feel free to ask!\n" ] @@ -176,7 +176,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 11, "metadata": {}, "outputs": [], "source": [ @@ -206,16 +206,16 @@ " },\n", " \"version\": \"v1.1\"\n", "}\n", - "memory = Mem0Composable .from_config(\n", + "memory = Mem0ComposableMemory.from_config(\n", " context_dict=context_dict,\n", " confif_dict=config,\n", - " primary_memory=ChatMemoryBuffer.from_defaults()\n", + " chat_history=ChatMemoryBuffer.from_defaults()\n", ")" ] }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 12, "metadata": {}, "outputs": [], "source": [ @@ -229,17 +229,17 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 13, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "> Running step 479c16d2-36f3-48e8-93f0-3f685e46243d. Step input: Hi, My name is Mayank.\n", + "> Running step 52ab5c4f-ba25-48e9-9a70-a4e0ae5db1b7. Step input: Hi, My name is Mayank.\n", "Added user message to memory: Hi, My name is Mayank.\n", "=== LLM Response ===\n", - "Hello Mayank! How can I assist you today?\n" + "Hello, Mayank! How can I assist you today?\n" ] } ], @@ -249,17 +249,17 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 14, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "> Running step 9fe132bf-3c53-4f24-88e6-325702f121d0. Step input: My preferred way of communication would be Call.\n", + "> Running step a661bd23-60a6-4085-bf5f-37783597f37f. Step input: My preferred way of communication would be Call.\n", "Added user message to memory: My preferred way of communication would be Call.\n", "=== LLM Response ===\n", - "Thank you for updating your communication preference to calls. If you need anything, just let me know!\n" + "Got it, Mayank! If you need to communicate, I'll make sure to use a call. How can I assist you further?\n" ] } ], @@ -269,23 +269,23 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 15, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "> Running step bf45f0b6-706a-4090-8045-be3dc6053d90. Step input: Send me an update of your product.\n", + "> Running step 7fb66b1b-042f-4b3d-b04a-88ca93ec9281. Step input: Send me an update of your product.\n", "Added user message to memory: Send me an update of your product.\n", "=== Calling Function ===\n", "Calling function: call_fn with args: {\"name\": \"Mayank\"}\n", "Calling... Mayank\n", "=== Function Output ===\n", "None\n", - "> Running step b6ff6eb1-12f2-4c79-9792-6e5297839310. Step input: None\n", + "> Running step 132367ce-9afe-400d-bb29-deecc2bc25eb. Step input: None\n", "=== LLM Response ===\n", - "I've arranged to call you with an update on our product. If there's anything else you need, feel free to let me know!\n" + "I've initiated a call to provide you with the product update. 
Please check your phone.\n" ] } ], diff --git a/llama-index-integrations/memory/llama-index-memory-mem0/llama_index/memory/mem0/__init__.py b/llama-index-integrations/memory/llama-index-memory-mem0/llama_index/memory/mem0/__init__.py index b5420ff90d61f..af368952d98ec 100644 --- a/llama-index-integrations/memory/llama-index-memory-mem0/llama_index/memory/mem0/__init__.py +++ b/llama-index-integrations/memory/llama-index-memory-mem0/llama_index/memory/mem0/__init__.py @@ -1,3 +1,3 @@ -from llama_index.memory.mem0.base import Mem0Composable +from llama_index.memory.mem0.base import Mem0ComposableMemory -__all__ = ["Mem0Composable"] +__all__ = ["Mem0ComposableMemory"] diff --git a/llama-index-integrations/memory/llama-index-memory-mem0/llama_index/memory/mem0/base.py b/llama-index-integrations/memory/llama-index-memory-mem0/llama_index/memory/mem0/base.py index 98ff7408c5a0b..4bd39bda3c555 100644 --- a/llama-index-integrations/memory/llama-index-memory-mem0/llama_index/memory/mem0/base.py +++ b/llama-index-integrations/memory/llama-index-memory-mem0/llama_index/memory/mem0/base.py @@ -3,24 +3,32 @@ from llama_index.core.memory.types import BaseMemory from llama_index.memory.mem0.utils import convert_memory_to_system_message from mem0 import MemoryClient, Memory -from pydantic import BaseModel, Field, ValidationError, model_validator, SerializeAsAny +from pydantic import BaseModel, Field, ValidationError, model_validator, SerializeAsAny, PrivateAttr from llama_index.core.base.llms.types import ChatMessage, MessageRole class BaseMem0(BaseMemory): """Base class for Mem0""" + _client: Optional[Union[MemoryClient, Memory]] = PrivateAttr(default=None) + class Config: arbitrary_types_allowed = True - client: Optional[Union[MemoryClient, Memory]] = None - + def __init__(self, **data): + super().__init__(**data) + self._client = data.get('client') + #TODO: Return type def add(self, messages: Union[str, List[Dict[str, str]]], **kwargs) -> Optional[Any]: - response = self.client.add(messages=messages, **kwargs) + if self._client is None: + raise ValueError("Client is not initialized") + response = self._client.add(messages=messages, **kwargs) return response - + #TODO: Return type def search(self, query: str, **kwargs) -> Optional[Any]: - response = self.client.search(query=query, **kwargs) + if self._client is None: + raise ValueError("Client is not initialized") + response = self._client.search(query=query, **kwargs) return response #TODO: Add more apis from client @@ -45,23 +53,22 @@ def get_context(self) -> Dict[str, Optional[str]]: class Config: validate_assignment = True -class Mem0Composable(BaseMem0): - #TODO: Make it private variable - chat_history: SerializeAsAny[BaseMemory] = Field( - description="Primary memory source for chat agent.", - ) - #TODO: Make it private variable - mem0_history: Dict[str, Any] = {} - context: Optional[Mem0Context] = None +class Mem0ComposableMemory(BaseMem0): + chat_history: SerializeAsAny[BaseMemory] = Field(description="Primary memory source for chat agent.") + _context: Optional[Mem0Context] = PrivateAttr(default=None) + + def __init__(self, **data): + super().__init__(**data) + self._context = data.get('context') @classmethod def class_name(cls) -> str: """Class name.""" - return "Mem0Composable" + return "Mem0ComposableMemory" #TODO: Not functional yet. 
@classmethod - def from_defaults(cls, **kwargs: Any) -> "Mem0Composable": + def from_defaults(cls, **kwargs: Any) -> "Mem0ComposableMemory": raise NotImplementedError("Use either from_client or from_config") @classmethod def from_client( @@ -85,13 +92,12 @@ except ValidationError as e: raise ValidationError(f"Context validation error: {e}") - client = MemoryClient( - api_key=api_key, - host=host, - organization=organization, - project=project + client = MemoryClient(api_key=api_key, host=host, organization=organization, project=project) + return cls( + chat_history=chat_history, + context=context, + client=client ) - return cls(chat_history=chat_history, client=client, context=context) @classmethod def from_config( @@ -112,7 +118,11 @@ raise ValidationError(f"Context validation error: {e}") client = Memory.from_config(config_dict=confif_dict) - return cls(chat_history=chat_history, context=context, client=client) + return cls( + chat_history=chat_history, + context=context, + client=client + ) def get(self, input: Optional[str] = None, **kwargs: Any) -> List[ChatMessage]: messages = self.chat_history.get(input=input, **kwargs) @@ -120,21 +130,21 @@ # Iterate through messages from last to first for message in reversed(messages): if message.role == MessageRole.USER: - most_recent_user_message = message + _recent_user_message = message break else: - # If no user message is found, raise an exception - raise ValueError("No user message found in chat history") - input = str(most_recent_user_message.content) + raise ValueError("No input provided and no user message found in chat history.") + input = str(_recent_user_message.content) #TODO: Add support for more kwargs, for api and oss - search_results = self.search(query=input, **self.context.get_context()) - if isinstance(self.client, Memory): + search_results = self.search(query=input, **self._context.get_context()) + if isinstance(self._client, Memory): search_results = search_results['results'] system_message = convert_memory_to_system_message(search_results) - #TODO: What if users provide system_message or prefix_message, or system_message in chat_history becaomes old. + # If system message is present if len(messages) > 0 and messages[0].role == MessageRole.SYSTEM: + #TODO: What if users provide system_message or prefix_message, or system_message in chat_history becomes old enough? assert messages[0].content is not None system_message = convert_memory_to_system_message(response=search_results, existing_system_message=messages[0]) messages.insert(0, system_message) @@ -144,32 +154,30 @@ def get_all(self) -> List[ChatMessage]: """Returns all chat history.""" return self.chat_history.get_all() - def _add_to_memory(self, message: ChatMessage) -> None: + def _add_user_msg_to_memory(self, message: ChatMessage) -> None: """Only add new user message to client memory.""" if message.role == MessageRole.USER: - msg_str = str(message.content) - if msg_str not in self.mem0_history: - #TODO: Implement for more kwargs - response = self.client.add( - messages=msg_str, - **self.context.get_context() - ) - self.mem0_history[msg_str] = response + self.add( + messages=str(message.content), + **self._context.get_context() + ) def put(self, message: ChatMessage) -> None: - """Add message to chat history. Add new user message to client memory.""" + """Add message to chat history.
Add user message to client memory.""" + self._add_user_msg_to_memory(message) self.chat_history.put(message) - self._add_to_memory(message) def set(self, messages: List[ChatMessage]) -> None: """Set chat history. Add new user message to client memory.""" + initial_chat_len = len(self.chat_history.get_all()) + # Only add the new chat messages to client memory + for message in messages[initial_chat_len:]: + self._add_user_msg_to_memory(message) self.chat_history.set(messages) - for message in messages: - self._add_to_memory(message) def reset(self) -> None: """Only reset chat history""" - #TODO: Not resetting client memory, since it is not context specific. + #TODO: Context-specific reset is missing in the client. self.chat_history.reset()
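
Usage note (illustrative, not part of the patch): a minimal sketch of exercising the renamed Mem0ComposableMemory through the BaseMemory interface implemented above, mirroring the notebook examples in this change. The user_id and api_key values are placeholders, and the snippet assumes a valid Mem0 platform key.

    # Illustrative sketch only; placeholder credentials, mirrors the notebooks above.
    from llama_index.core.base.llms.types import ChatMessage, MessageRole
    from llama_index.memory.mem0 import Mem0ComposableMemory

    memory = Mem0ComposableMemory.from_client(
        context_dict={"user_id": "david_1"},  # any stable user identifier
        api_key="<your-mem0-api-key>",  # Mem0 platform API key (placeholder)
    )

    # put() appends to the primary chat history; USER messages are also
    # mirrored into Mem0 via _add_user_msg_to_memory().
    memory.put(ChatMessage(role=MessageRole.USER, content="I prefer email over phone calls."))

    # get() searches Mem0 with the given input (or the latest user message)
    # and prepends the retrieved memories as a system message.
    for msg in memory.get(input="How should you contact me?"):
        print(msg.role, ":", msg.content)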