From bf7e87fe88ad005b8f194b383f3d149246f5ec10 Mon Sep 17 00:00:00 2001
From: Mike Bird <63524998+MikeBirdTech@users.noreply.github.com>
Date: Sat, 23 Aug 2025 12:16:17 -0400
Subject: [PATCH] feat: Add OpenAI support for plugin generation
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

- Add OpenAI as a supported AI service option with GPT-5 as default model
- Update AIService class to handle OpenAI model configuration
- Update CLI to accept --ai openai option
- Update README with OpenAI setup instructions and usage examples

Fixes #1

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
---
 README.md                    | 9 ++++++---
 obsidian_plugin_generator.py | 9 +++++++--
 2 files changed, 13 insertions(+), 5 deletions(-)

diff --git a/README.md b/README.md
index aa350c3..0dbf71a 100644
--- a/README.md
+++ b/README.md
@@ -17,7 +17,9 @@ Click the image above to watch the demo of using the Obsidian Plugin Generator.
    ```
 3. Set up environment variables:
    - `GROQ_API_KEY`: Your Groq API key (if using Groq)
-   - `OLLAMA_MODEL`: Your preferred Ollama model (default is "llama3.1")
+   - `OPENAI_API_KEY`: Your OpenAI API key (if using OpenAI)
+   - `ANTHROPIC_API_KEY`: Your Anthropic API key (if using Anthropic)
+   - `OLLAMA_MODEL`: Your preferred Ollama model (default is "gemma3:latest")
 
 ## Usage
 
@@ -28,7 +30,7 @@ python obsidian_plugin_generator.py [plugin_name] [options]
 ### Options
 
 - `--vault-path PATH`: Path to Obsidian vault (default: ~/Documents/ObsidianVault)
-- `--ai {ollama,groq,anthropic}`: AI service to use (default: ollama)
+- `--ai {ollama,groq,anthropic,openai}`: AI service to use (default: ollama)
 - `--name`: Name of the plugin (default: "My Obsidian Plugin")
 
 ### Examples
@@ -37,12 +39,13 @@ python obsidian_plugin_generator.py [plugin_name] [options]
 python obsidian_plugin_generator.py --name "My Custom Plugin"
 python obsidian_plugin_generator.py --name "Task Tracker" --vault-path ~/Obsidian/MyVault
 python obsidian_plugin_generator.py --name "Code Snippets" --ai groq
+python obsidian_plugin_generator.py --name "AI Assistant" --ai openai --model gpt-5
 ```
 
 ## Features
 
 - AI-powered plugin generation based on user descriptions
-- Supports multiple AI services: Ollama and Groq
+- Supports multiple AI services: Ollama, Groq, Anthropic, and OpenAI
 - Automatically clones and modifies the Obsidian sample plugin
 - Generates enhanced TypeScript code for the plugin
 - Handles existing directories with options to overwrite, rename, or cancel
diff --git a/obsidian_plugin_generator.py b/obsidian_plugin_generator.py
index 14dc292..2426162 100644
--- a/obsidian_plugin_generator.py
+++ b/obsidian_plugin_generator.py
@@ -30,6 +30,8 @@ def __init__(self, service_type: str, model: Optional[str] = None):
                 self.model = "llama-3.3-70b-versatile"
             elif self.service_type == "ollama":
                 self.model = "gemma3:latest"
+            elif self.service_type == "openai":
+                self.model = "gpt-5"
             # Add other defaults if needed
 
     def query(self, prompt: str, max_retries: int = 3) -> str:
@@ -54,8 +56,11 @@ def query(self, prompt: str, max_retries: int = 3) -> str:
             litellm_model = (
                 f"groq/{model_str}" if not model_str.startswith("groq/") else model_str
             )
+        elif self.service_type == "openai":
+            # OpenAI models can be used directly without prefix in litellm
+            litellm_model = model_str
         else:
-            # Assuming direct use for other potential services (like OpenAI)
+            # Assuming direct use for other potential services
             litellm_model = model_str
 
         messages = [{"role": "user", "content": prompt}]
@@ -360,7 +365,7 @@ def main():
     )
     parser.add_argument(
         "--ai",
-        choices=["ollama", "groq", "anthropic"],
+        choices=["ollama", "groq", "anthropic", "openai"],
         default="ollama",
         help="AI service to use",
     )