diff --git a/README.md b/README.md index 5cbecc4..14bea67 100644 --- a/README.md +++ b/README.md @@ -34,6 +34,10 @@ Merlin is a powerful, multi-agent AI assistant designed to help you with various - can always access memory and show whats there. - stored in memory.yaml so you the use has easy access to the long term memory +### File Expert +- Find, read and save files +- User control over where the LLM can look + ## Future Vision Merlin's long-term goal is to become a comprehensive AI assistant that can: diff --git a/pyproject.toml b/pyproject.toml index 8580b33..56fa8be 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "Merlin" -version = "1.2.4" +version = "1.3.0" description = "Merlin - Your AI Assistant with multi-agent architecture" readme = "README.md" requires-python = ">=3.14" diff --git a/src/config/app_config.py b/src/config/app_config.py index 2b28373..ff4a6d6 100644 --- a/src/config/app_config.py +++ b/src/config/app_config.py @@ -1,8 +1,11 @@ +import os from pathlib import Path from types import SimpleNamespace import yaml +from .user_config import UserConfig + class Config: """Main Config Class for application-level configuration.""" @@ -10,6 +13,8 @@ class Config: ENVIRONMENT = "dev" DEBUG = True LOG_LEVEL = "DEBUG" + FILE_SEARCH_DIRECTORIES = [os.path.expanduser("~")] + FILE_SEARCH_DIRECTORIES.extend(UserConfig.FILE_SEARCH_DIRECTORIES) class Model: """Application-level model configuration with inheritance support.""" @@ -56,8 +61,6 @@ def _get_base_config(cls): def _merge_user_config(cls, base_config): """Merge user configuration overrides with base config.""" try: - from .user_config import UserConfig - user_model_config = UserConfig.Model # Override base config with user settings diff --git a/src/config/user_config_template.py b/src/config/user_config_template.py index 37d94fd..4eb8d6b 100644 --- a/src/config/user_config_template.py +++ b/src/config/user_config_template.py @@ 
-8,6 +8,10 @@ class UserConfig: """User-specific model configurations - override application defaults.""" + # List of directories you want the AI to start in when searching for files + # We already default to your user home folder + FILE_SEARCH_DIRECTORIES = [] + class Model: """Personal model preferences and overrides.""" diff --git a/src/core/model_factory.py b/src/core/model_factory.py index ecbc0e0..7afed2d 100644 --- a/src/core/model_factory.py +++ b/src/core/model_factory.py @@ -64,3 +64,8 @@ def create_lighting_model() -> dspy.LM: def create_memory_model() -> dspy.LM: """Create memory expert model.""" return ModelFactory.create_dspy_model("expert", "memory") + + @staticmethod + def create_file_model() -> dspy.LM: + """Create file expert model.""" + return ModelFactory.create_dspy_model("expert", "file") diff --git a/src/experts/file.py b/src/experts/file.py new file mode 100644 index 0000000..45d846d --- /dev/null +++ b/src/experts/file.py @@ -0,0 +1,98 @@ +import os +import subprocess + +import dspy + +from config import Config + + +class FileExpertSignature(dspy.Signature): + """You are the file expert responsible for managing files. + + Your role: + - file management, finding, reading, amending and saving + - returning file content and / or file location to the requestor + """ + + command: str = dspy.InputField( + desc="A natural language command describing what you should do with a file" + ) + answer: str = dspy.OutputField( + desc="The contents of a file or a confirmation of a file action" + ) + + +class FileAgent(dspy.Module): + """A File Agent that has access to file-based tools.""" + + def __init__(self): + """Initialise the file agent. + + Searches the directories configured in Config.FILE_SEARCH_DIRECTORIES (defaults to the user's home directory) and their subdirectories. 
+ """ + # Tools exposed to the ReAct loop + self.tools = [ + self.load_file, + self.write_file, + self.find_file, + ] + self.file_agent = dspy.ReAct(signature=FileExpertSignature, tools=self.tools) + + def load_file(self, file_path) -> str | None: + """Load and return specified file.""" + if os.path.exists(file_path): + try: + with open(file_path) as file: + return file.read() + except Exception: + return None + else: + return None + + def write_file(self, file_path: str, content: str) -> bool: + """Write or overwrite the specified file with given content.""" + try: + with open(file_path, "w") as f: + f.write(content) + return True + except Exception: + return False + + def find_file(self, filename: str) -> list[str]: + """Search for *filename* in the user's home directory and subdirectories. + + Returns a list of all absolute paths that exist. + """ + try: + search_dirs = Config.FILE_SEARCH_DIRECTORIES + # Add any additional directories here if needed + # For example: search_dirs.append('/tmp') + matches = [] + # Build the find command with multiple starting points + for directory in search_dirs: + cmd = [ + "find", + directory, + "-name", + filename, + "-not", + "-type", + "l", + "-not", + "-path", + "*/.*", + ] + + result = subprocess.run(cmd, capture_output=True, text=True, check=True) + # Split output by newlines and filter out empty lines + found_matches = [ + line for line in result.stdout.strip().split("\n") if line + ] + matches.extend(found_matches) + return matches + except subprocess.CalledProcessError: + # If find command fails, return empty list + return [] + except Exception: + # Handle any other exceptions + return [] diff --git a/src/experts/orchestrator.py b/src/experts/orchestrator.py index 09d03dc..7605b98 100644 --- a/src/experts/orchestrator.py +++ b/src/experts/orchestrator.py @@ -2,6 +2,7 @@ from core import ModelFactory +from .file import FileAgent from .game import GameAgent from .lights import LightingAgent from .memory import MemoryAgent 
@@ -32,6 +33,7 @@ def __init__(self): self.consult_weather_expert, self.consult_lighting_expert, self.consult_memory_expert, + self.consult_file_expert, ] self.oracle = dspy.ReAct( signature=OrchestratorSignature, tools=self.tools, max_iters=10 @@ -67,7 +69,7 @@ def consult_lighting_expert(self, command: str) -> str: return result.answer def consult_memory_expert(self, command: str) -> str: - """Use this expert when you want to save or retrieve information. + """Use this expert when you want to save or retrieve your memory. Use this expert when you want to save or retrieve any information that should be stored for future use. Use this expert whenever you @@ -78,3 +80,12 @@ def consult_memory_expert(self, command: str) -> str: with dspy.context(lm=ModelFactory.create_memory_model()): result = MemoryAgent().memory_agent(command=command) return result.answer + + def consult_file_expert(self, command: str) -> str: + """Use this expert when you want to save or retrieve information from files. 
+ + Also used to find files and update files + """ + with dspy.context(lm=ModelFactory.create_file_model()): + result = FileAgent().file_agent(command=command) + return result.answer diff --git a/src/main.py b/src/main.py index 61ab86e..29d0acb 100644 --- a/src/main.py +++ b/src/main.py @@ -97,16 +97,6 @@ def handle_question_processing(question, history): def display_response_and_routing(result): """Display the AI response and routing information.""" - # Display AI response in a beautiful panel - ai_response = Panel( - Markdown(result.answer), - title="🤖🧙‍♂️ Merlin Response", - border_style="cyan", - padding=(1, 2), - ) - console.print(ai_response) - console.print() - # Check if any expert agents were called expert_calls = [] if hasattr(result, "trajectory") and isinstance(result.trajectory, dict): @@ -116,10 +106,19 @@ def display_response_and_routing(result): expert_calls.append(str(value)) if expert_calls: - console.print(f"[dim]🔀 Routed to: {', '.join(set(expert_calls))}[/dim]") + handled_by = f"🔀 Routed to: {', '.join(set(expert_calls))}" else: - console.print("[dim]💬 Handled directly by orchestrator[/dim]") - console.print() + handled_by = "💬 Handled directly by orchestrator" + + # Display AI response in a beautiful panel + ai_response = Panel( + Markdown(result.answer), + title="🤖🧙‍♂️ Merlin Response", + subtitle=handled_by, + border_style="cyan", + padding=(1, 2), + ) + console.print(ai_response) def chat_interface(): diff --git a/uv.lock b/uv.lock index 050f7f2..634129d 100644 --- a/uv.lock +++ b/uv.lock @@ -632,7 +632,7 @@ wheels = [ [[package]] name = "litellm" -version = "1.79.0" +version = "1.79.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiohttp" }, @@ -648,9 +648,9 @@ dependencies = [ { name = "tiktoken" }, { name = "tokenizers" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/90/52/2853febf8ea3072d8c76e3ee22d3168e6a4f97ebd8f21905e815a381c58b/litellm-1.79.0.tar.gz", hash = 
"sha256:f58bb751222ee0e1ffecb2d44987999f9fa94130a6d1a478e19a3e5e8b9a7414", size = 11146414, upload-time = "2025-10-26T01:20:55.247Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d4/12/1c30f1019892399a488ed60ebcdfed3e2603123d9591030abc8c702ff37a/litellm-1.79.1.tar.gz", hash = "sha256:c1cf0232c01e7ad4b8442d2cdd78973ce74dfda37ad1d9f0ec3c911510e26523", size = 11216675, upload-time = "2025-11-01T19:22:05.523Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c5/26/a5fef380af5d6a2f47cda979d88561af1e1a8efc07da2ef72c0e8cb6842c/litellm-1.79.0-py3-none-any.whl", hash = "sha256:93414b6ed55fa9e3268e8cb3100faab960c9ecd18173129ccd85471cf3db4f1a", size = 10197864, upload-time = "2025-10-26T01:20:51.75Z" }, + { url = "https://files.pythonhosted.org/packages/2f/e4/ac5905dfe9c0c195e59c36ea431277090dd2aa1acbcc514f781fa87a5903/litellm-1.79.1-py3-none-any.whl", hash = "sha256:738f7bf36b31514ac11cc71f65718238b57696fcf22f8b3f1e57c44daf17a569", size = 10285849, upload-time = "2025-11-01T19:22:01.637Z" }, ] [[package]] @@ -726,7 +726,7 @@ wheels = [ [[package]] name = "merlin" -version = "1.2.4" +version = "1.3.0" source = { editable = "." } dependencies = [ { name = "dspy" },