diff --git a/.gitignore b/.gitignore index 6635cf55..b83f10bc 100644 --- a/.gitignore +++ b/.gitignore @@ -8,3 +8,6 @@ node_modules !.env.example vite.config.js.timestamp-* vite.config.ts.timestamp-* + +# Chunkhound +.chunkhound* diff --git a/.mcp.json b/.mcp.json new file mode 100644 index 00000000..cff43fc2 --- /dev/null +++ b/.mcp.json @@ -0,0 +1,11 @@ +{ + "mcpServers": { + "ChunkHound": { + "command": "chunkhound", + "args": [ + "mcp" + ], + "env": {} + } + } +} \ No newline at end of file diff --git a/DOC_STRUCTURE.md b/DOC_STRUCTURE.md new file mode 100644 index 00000000..ef316dfd --- /dev/null +++ b/DOC_STRUCTURE.md @@ -0,0 +1,98 @@ +# Documentation Structure + +This document explains the organization of planning and documentation in the Flareup project. + +## 📋 Active Documents (ROOT LEVEL) + +### [ROADMAP.md](/ROADMAP.md) ⭐ PRIMARY +**The single source of truth for project planning.** + +- **Purpose:** Comprehensive roadmap and status tracking +- **Updated:** Dec 24, 2025 (v0.1.1) +- **Contains:** + - Current status (75% Raycast parity) + - Recent wins and completed work + - Remaining work prioritized by phase + - Future enhancements + - Strategic priorities + - Known issues and limitations + - Milestones and progress tracking + +**Always consult ROADMAP.md first for:** +- What's next to work on +- Current project status +- Feature priorities +- Milestone tracking + +## 📚 Archived Documents (docs/archive/) + +### Purpose +Historical planning documents from the Dec 21-22, 2025 audit period. These documents informed the current ROADMAP but have been largely superseded. + +### Contents +- **[README.md](./docs/archive/README.md)** - Explains archive organization +- **[TODO.md](./docs/archive/TODO.md)** - Detailed task breakdown (Dec 21 audit) +- **[FEATURE_IDEAS.md](./docs/archive/FEATURE_IDEAS.md)** - Feature brainstorming +- **[RAYCAST_GAPS.md](./docs/archive/RAYCAST_GAPS.md)** - Parity analysis +- **[AUDIT_REPORT.md](./docs/archive/AUDIT_REPORT.md)** - Comprehensive code audit +- **[CLAUDE_REVIEW_2025-12-22.md](./docs/archive/CLAUDE_REVIEW_2025-12-22.md)** - Code review + +### When to Use +- **For historical context** - Understanding how we got here +- **For detailed technical debt** - AUDIT_REPORT.md has deep analysis +- **For ideation** - FEATURE_IDEAS.md has brainstorming notes + +**Do NOT use for:** +- Current priorities (use ROADMAP.md instead) +- Feature planning (use ROADMAP.md instead) +- Status tracking (use ROADMAP.md instead) + +## 🔄 Workflow + +### Planning New Work +1. Check [ROADMAP.md](/ROADMAP.md) for current priorities +2. Identify next phase/task +3. Create implementation plan in brain artifacts +4. Execute and update ROADMAP.md as needed + +### Updating Status +1. Update [ROADMAP.md](/ROADMAP.md) "Recent Wins" section +2. Update version numbers and parity percentage +3. Move completed items from "Remaining Work" to changelog +4. Adjust priorities based on new information + +### Completing Milestones +1. Update [ROADMAP.md](/ROADMAP.md) milestone checklist +2. Update parity percentage +3. Add changelog entry +4. Consider archiving any temporary planning docs + +## 📝 Documentation Principles + +### ROADMAP.md Should Be: +- ✅ Comprehensive yet scannable +- ✅ Updated as work progresses +- ✅ Single source of truth +- ✅ Written for humans (clear, organized) + +### Archived Docs Should Be: +- ✅ Clearly marked as archived +- ✅ Pointing to ROADMAP.md +- ✅ Preserved for historical reference +- ✅ NOT actively maintained + +## 🎯 Quick Reference + +| Need to... | Use this document | +|------------|-------------------| +| Know what to work on next | [ROADMAP.md](/ROADMAP.md) → "Remaining Work" | +| Check current status | [ROADMAP.md](/ROADMAP.md) → "Current Status" | +| See what's been done | [ROADMAP.md](/ROADMAP.md) → "Recent Wins" | +| Understand a past decision | [docs/archive/](/docs/archive/) | +| Plan future features | [ROADMAP.md](/ROADMAP.md) → "Future Enhancements" | +| Find technical debt | [docs/archive/AUDIT_REPORT.md](/docs/archive/AUDIT_REPORT.md) | + +--- + +**Last Updated:** Dec 24, 2025 +**Maintained By:** Keep ROADMAP.md current, archive everything else diff --git a/README.md b/README.md deleted file mode 100644 index b99009c4..00000000 --- a/README.md +++ /dev/null @@ -1,132 +0,0 @@ -# Flare Launcher - -An open-source, Raycast-compatible launcher for Linux. - -For more background on this project, I have a [post here](https://byteatatime.dev/posts/recreating-raycast). - -![GIF of Flare, showing off its main features](https://raw.githubusercontent.com/ByteAtATime/flare/main/images/flare.gif) - -**Disclaimer:** This is a hobby project and is **not** affiliated with, nor endorsed by, the official Raycast team. - -## ✨ Features - -This launcher aims to recreate most of Raycast's core features on Linux: - -- **Extensible Command Palette**: The core of the application. Search for and launch applications, run commands, execute quicklinks, and more. -- **Extension Support**: Run extensions built Raycast's API. Features a built-in store to browse and install extensions from the official Raycast Store. -- **Powerful Calculator**: A smart calculator integrated directly into the search bar, powered by **SoulverCore**. It handles unit conversions, currency, and complex math expressions. -- **Clipboard History**: A searchable history of everything you've copied, with support for text, images, links, and colors. -- **Snippets**: Create and manage text snippets that can be expanded anywhere on your system. Supports dynamic placeholders for dates, clipboard content, and more. -- **AI Integration**: Connects to OpenRouter to bring the power of various AI models directly into the launcher. -- **And more** to come! - -## 🧩 Extension Compatibility - -While the goal is to support a wide range of Raycast extensions, there are some inherent limitations due to the differences between macOS and Linux. Common reasons an extension might not work include: - -1. **macOS-specific APIs**: Many extensions rely on native macOS features like AppleScript (`runAppleScript`), hardcoded paths (`/Applications/`), or specific system libraries that do not exist on Linux. -2. **Native Binaries**: Extensions that bundle pre-compiled binaries for macOS will not work. Similarly, extensions that use Swift to interact with the operating system won't work either. -3. **Assumed Permissions**: Extensions may assume they have access to macOS-specific permissioned data (like Contacts or Calendars) which have no direct equivalent. - -## 🚀 Installation - -You can download the latest release from the [GitHub Releases page](https://github.com/ByteAtATime/flare/releases). - -Currently, only an `.AppImage` is provided. After downloading, make it executable: - -```bash -chmod +x .AppImage -./.AppImage -``` - -This will open a long-running process in the background. To toggle the visibility of the window, simply run it again. - -Depending on your environment, you may be able to bind the script to a hotkey. For example, on Hyprland: - -``` -bind = ALT, Space, exec, /path/to/flare.AppImage -``` - -### System Requirements - -The application requires `glibc` version **2.38**, which is installed by default on Ubuntu 24.04, Fedora 40, and recent versions of Arch Linux. - -**Wayland users:** For the global snippet expansion feature to work, the application needs permission to read keyboard events. The recommended and most secure method is to add a `udev` rule. - -1. Create the `udev` rule file: - - ```bash - sudo nano /etc/udev/rules.d/99-flare.rules - ``` - -2. Add the following line to the file: - This rule grants the user at the physical console access to keyboard devices. - - ``` - KERNEL=="event*", ENV{ID_INPUT_KEYBOARD}=="1", TAG+="uaccess" - ``` - -3. Reload the `udev` rules to apply the changes: - ```bash - sudo udevadm control --reload-rules && sudo udevadm trigger - ``` - -## 🛠️ Building from Source - -If you prefer to build the project from its source code, you'll need to set up the development environment. - -### Prerequisites - -- **Rust**: Install via `rustup`. -- **Node.js**: Use a recent LTS version. `pnpm` is the package manager for this project (`npm i -g pnpm`). -- **Tauri Prerequisites**: Follow the official [Tauri guide](https://v2.tauri.app/start/prerequisites/) to install system dependencies. -- **Swift Toolchain**: The calculator feature uses a Swift wrapper around SoulverCore. - -### Installation & Running - -1. **Clone the repository:** - - ```bash - git clone https://github.com/ByteAtATime/flare.git - cd flare - ``` - -2. **Install dependencies:** - This project uses a pnpm workspace. Install all dependencies from the root directory. - - ```bash - pnpm install - ``` - -3. **Build the Node.js sidecar binary:** - This step compiles the JavaScript plugin host into a binary that Tauri can execute. - - ```bash - pnpm --filter sidecar build - ``` - -4. **Compile SoulverCore Wrapper:** - This step compiles the wrapper around SoulverCore into a shared object file. - - ```bash - swift build -c release --package-path src-tauri/SoulverWrapper - ``` - -5. **Run in development mode:** - This command will launch the Tauri application with hot-reloading for the frontend. Note that you may need to tweak `LD_LIBRARY_PATH` to point to the SoulverWrapper. - - ```bash - export LD_LIBRARY_PATH="$(pwd)/src-tauri/SoulverWrapper/Vendor/SoulverCore-linux:$(pwd)/src-tauri/SoulverWrapper/.build/release" - pnpm tauri dev - ``` - -## 🙏 Acknowledgements - -This project stands on the shoulders of giants: - -- A huge thank you to the team behind the original **[Raycast](https://raycast.com)**. -- The powerful calculator is powered by **[SoulverCore](https://github.com/soulverteam/SoulverCore)** by Acqualia. Special thanks to Zac for getting me a Linux build -- on his vacation, no less! - -## 📜 License - -This project is licensed under the [MIT License](LICENSE). diff --git a/ROADMAP.md b/ROADMAP.md new file mode 100644 index 00000000..89584225 --- /dev/null +++ b/ROADMAP.md @@ -0,0 +1,415 @@ +# Flare Development Roadmap +**Last Updated:** 2025-12-24 +**Current Version:** 0.1.1 +**Raycast Parity:** ~80% + +--- + +## 🎯 Project Vision + +Build a Raycast-quality launcher for Linux with native system integration and extension compatibility. + +**Core Goals:** +- 90%+ Raycast feature parity +- Better Linux-native integration than Raycast +- Maintain extension compatibility where possible +- Superior performance and stability + +--- + +## ✅ Recent Wins (Last Week) + +### Version 0.1.1 - Settings & Theming (Dec 24) +- ✅ **Per-Command Hotkeys** - Full system with SQLite storage, UI, conflict detection, defaults (Dec 23) +- ✅ **Window Management Global Hotkeys** - Auto-initialized defaults, works on ANY active window (Dec 24) +- ✅ **System Commands Complete** - Restart, volume controls, empty trash, eject drive with confirmations +- ✅ **Downloads Manager Enhanced** - Grid view, sort options, open/copy latest download commands +- ✅ **Extension Compatibility System** - Scoring, warnings, macOS pattern detection, Mach-O scanning +- ✅ **Comprehensive Settings System** - 6 tabs (General, Appearance, Extensions, Hotkeys, AI, Advanced, About) +- ✅ **9 Professional Themes** - Light, Dark, System + Tokyo Night, Dracula, Nord, Catppuccin, Gruvbox, One Dark +- ✅ **Close on Blur** - Window auto-hides when focus is lost (configurable) +- ✅ **Auto-Start on Login** - XDG autostart for Linux +- ✅ **Frecency Bug Fix** - Fixed nanosecond timestamp conversion +- ✅ **Window Edge Visibility** - Border/shadow on frameless window +- ✅ **Automated Version Management** - Single source in package.json + +### Extension Compatibility Fixed (Dec 22) +- ✅ `usePersistentState` now actually persists (was just `useState`) +- ✅ React Reconciler stubs return safe values instead of throwing errors +- ✅ TcpListener gracefully handles port conflicts (no more crashes) + +### Performance & Stability (Dec 22) +- ✅ Database indices added (clipboard, AI, snippets) - major query speedup +- ✅ N+1 query eliminated in file indexer - 10x faster indexing +- ✅ CPU monitoring moved to background thread - non-blocking UI +- ✅ Structured logging via `tracing` crate - production-ready + +### Code Quality (Dec 22) +- ✅ Debug `console.log` statements removed +- ✅ `println!`/`eprintln!` replaced with proper logging + +**Result:** 60% → **80% Raycast parity** + +--- + +## 🚀 Current Status + +### What Works Well + +| Feature | Status | Quality | Notes | +|---------|--------|---------|-------| +| Command Palette | ✅ Complete | Excellent | Fuzzy search, frecency ranking | +| Calculator | ✅ Complete | Excellent | SoulverCore integration | +| Clipboard History | ✅ Complete | Excellent | Text, images, colors, AES-GCM encryption | +| Snippets | ✅ Complete | Good | Rich placeholders, terminal detection | +| AI Chat | ✅ Complete | Excellent | Multi-provider, conversation history | +| File Search | ✅ Complete | Good | Fast indexing, watch for changes | +| Extensions | 🟡 Partial | Good | Basic compatibility, some limitations | +| System Monitors | ✅ Complete | Excellent | CPU, RAM, disk, battery, background updates | +| Quick Toggles | 🟡 Partial | Good | WiFi, Bluetooth, Dark Mode (DE-specific) | +| GitHub OAuth | ✅ Complete | Good | Token management via keyring | +| **Settings System** | ✅ Complete | Excellent | Multi-tab, persistence, themes | +| **Window Management** | ✅ Complete | Good | X11 snap/move/resize (343 LOC) | + +### Critical Gaps + +| Feature | Status | Impact | Blocking | +|---------|--------|--------|----------| +| **Per-Command Hotkeys** | ✅ Complete | Critical | SQLite storage, UI, defaults | +| **System Commands** | ✅ Complete | High | Restart, volume, trash, eject with confirmations | +| **Window Management** | ✅ Complete | High | Global hotkeys, auto-initialized, works on any window | +| **Downloads Manager** | ✅ Complete | Medium | File watching, SQLite, full UI, grid view | +| **Extension Compatibility** | ✅ Complete | High | Scoring, warnings, macOS detection | + +--- + +## 📋 Remaining Work + +### Phase 1: Core System Features (1-2 weeks) 🔴 + +**Goal:** 80% → 85% parity + +#### 1.1 Per-Command Hotkeys ✅ COMPLETED +**Status:** ✅ Complete (Dec 23) +**Impact:** CRITICAL - Major usability feature + +- [x] Extend `src-tauri/src/hotkey_manager.rs` +- [x] Store keybindings in SQLite +- [x] Settings UI for hotkey configuration (in existing Hotkeys tab) +- [x] Conflict detection (warn on duplicate bindings) +- [x] Default hotkeys: + - Clipboard History (Ctrl+Shift+V) + - Snippets (Ctrl+Shift+S) + - Window Management (Ctrl+Alt+Arrows) + - Lock Screen (Ctrl+Alt+L) + - And more... + +#### 1.2 System Commands ✅ COMPLETED +**Status:** ✅ Complete (Already was done!) +**Impact:** HIGH - Expected functionality + +**All Implemented:** +- [x] Lock screen (DE-specific detection) +- [x] Sleep (`systemctl suspend`) +- [x] Shutdown with confirmation +- [x] Restart with confirmation (`systemctl reboot`) +- [x] Volume up/down/mute (pactl + amixer fallback) +- [x] Set volume (0-100%) +- [x] Get volume status +- [x] Empty trash with confirmation +- [x] Eject drive (`udisksctl`) + +#### 1.3 Window Management ✅ COMPLETED +**Status:** ✅ Complete (Dec 24) +**Impact:** HIGH - Global window management via hotkeys + +**Fully Implemented:** +- [x] X11 window detection via `_NET_ACTIVE_WINDOW` (targets ANY active window) +- [x] Snap positions: left/right half, quarters, center, maximize, almost-maximize +- [x] Multi-monitor support via xrandr +- [x] Move window to different monitor +- [x] Commands in command palette UI +- [x] **Auto-initialized default hotkeys:** + - Ctrl+Alt+← Snap left + - Ctrl+Alt+→ Snap right + - Ctrl+Alt+↑ Snap top + - Ctrl+Alt+↓ Snap bottom + - Ctrl+Alt+M Maximize + - Ctrl+Alt+C Center +- [x] Works on ANY active window (Firefox, Terminal, VS Code, etc.) + +--- + +### Phase 2: Polish & Features (1-2 weeks) 🟡 + +**Goal:** 80% → 90%+ parity + +#### 2.1 Downloads Manager ✅ COMPLETED +**Status:** ✅ Complete (Already was done!) +**Impact:** MEDIUM + +**Fully Implemented:** +- [x] File watching via inotify (`notify` crate) +- [x] SQLite storage for download history +- [x] UI view with search/filter +- [x] Commands: list, open, show in folder, delete, clear +- [x] Automatic indexing on startup +- [x] Real-time detection of new downloads + +#### 2.2 Extension Compatibility ✅ COMPLETED +**Status:** ✅ Complete (Already was done!) +**Impact:** HIGH + +**Fully Implemented:** +- [x] Compatibility scoring system (0-100) +- [x] Heuristic detection for: + - AppleScript patterns + - macOS-specific paths + - macOS APIs (NSWorkspace, etc.) + - macOS shell commands + - Mach-O binaries +- [x] Warnings attached to PluginInfo +- [x] Binary substitution registry +- [x] Backend commands: `get_extension_compatibility`, `get_all_extensions_compatibility` +- [x] AppleScript shims with extensive pattern support + +#### 2.3 Testing Infrastructure (2 days) +- [ ] Add Rust unit tests (currently 0% coverage) + - [ ] snippets/engine.rs (placeholder expansion) + - [ ] extension_shims.rs (path translation) + - [ ] frecency.rs (scoring algorithm) + - [ ] soulver.rs (calculator) +- [ ] Enhance CI pipeline + - [ ] Add `cargo test` step + - [ ] Add `pnpm test:unit` step + - [ ] Run `cargo clippy -- -D warnings` + - [ ] PR-triggered workflows + +#### 2.4 Performance Profiling (1 day) +- [ ] Profile startup time (target: <500ms) +- [ ] Profile search latency (target: <50ms) +- [ ] Memory usage audit +- [ ] Bundle size optimization + +--- + +### Phase 3: Nice-to-Haves 🟢 + +**Timeline:** After 90% parity achieved + +| Feature | Effort | Priority | Notes | +|---------|--------|----------|-------| +| Menu Bar / System Tray | 3 days | Medium | Background indicator | +| Wayland Window Mgmt | 2 weeks | Medium | Compositor-specific (Sway/GNOME/KDE) | +| Settings Sync | 1 week | Medium | Cross-device sync | +| Extension Hot Reload | 2 days | Low | Dev experience | +| Trash Management | 1 day | Low | Restore from trash | + +--- + +## 🎨 Future Enhancements + +### Linux-Exclusive Features + +Features that go beyond Raycast: + +#### 1. Keyboard Maestro-Style Macros ⭐ +**Priority:** HIGH - Major differentiator + +- Record keyboard sequences +- Multiple trigger types (hotkey, typed string, schedule) +- Variable substitution (`{clipboard}`, `{date}`, `{shell:cmd}`) +- Conditional branching and loops + +**MVP Scope:** +- Keyboard recording only (no mouse) +- Hotkey triggers +- Basic actions: type text, key combo, delay, shell command +- Simple variables: `{clipboard}`, `{date}`, `{input}` + +#### 2. Scheduled Actions +- Run extensions on timers +- Cron-like scheduling +- Daily digest commands +- Delayed clipboard actions + +#### 3. Webhooks / Remote Triggers +- HTTP endpoints trigger commands +- Integration with n8n, Zapier, Home Assistant +- Authentication for security + +#### 4. Chained Commands / Pipes +- Command output → next command input +- Visual workflow builder +- Save pipelines as reusable workflows + +#### 5. Linux System Integration +- Systemd service control +- DBus-native toggles (faster than shell commands) +- Docker/Podman container management +- Flatpak/Snap integration + +--- + +## 📊 Strategic Priorities + +From most to least critical for Raycast replacement: + +| Rank | Initiative | Impact | Effort | Timeline | +|------|-----------|--------|--------|----------| +| 1 | **Per-Command Hotkeys** | Critical | Medium | Week 1 | +| 2 | **System Commands** | High | Low | Week 1 | +| 3 | **Window Mgmt UI** | Medium | Low | Week 2 | +| 4 | Downloads Manager | Medium | Medium | Week 2-3 | +| 5 | Extension Compatibility | High | Medium | Week 3-4 | +| 6 | Testing Infrastructure | Medium | Medium | Week 4 | +| 7 | Performance Tuning | Medium | Medium | Week 5 | +| 8 | Settings Sync | Medium | High | Future | +| 9 | Macro System | High | High | Future | + +--- + +## 🐛 Known Issues & Limitations + +### Extension Compatibility + +**What Works:** +- Pure UI extensions (lists, forms, detail views) - 90% +- Clipboard operations - 80% (HTML not supported) +- HTTP/API calls - 95% +- Local storage & preferences - 100% + +**What Doesn't:** +- AppleScript (only 4 basic patterns) - 10% +- Native macOS binaries - 0% +- macOS-specific system APIs - 5% +- Browser JS evaluation - 0% + +**AppleScript Coverage:** + +| Pattern | Status | +|---------|--------| +| `tell app "X" to activate` | ✅ Supported | +| `tell app "X" to quit` | ✅ Supported | +| `display notification` | ✅ Supported | +| `set volume` | ✅ Supported | +| `do shell script` | ❌ Not yet | +| `open location` | ❌ Not yet | +| `tell app "System Events"` | ❌ Complex | +| `tell app "Finder"` | ❌ Complex | + +### Platform Limitations + +| Feature | X11 | Wayland | Notes | +|---------|-----|---------|-------| +| Window Management | ✅ Works | 🟡 Partial | Compositor-specific | +| Global Hotkeys | ✅ Works | ✅ Works | Via Tauri plugin | +| Clipboard | ✅ Works | ✅ Works | Via Tauri plugin | +| Selected Text | ✅ Works | ⚠️ Limited | Wayland security model | +| Snippet Expansion | ✅ Works | ⚠️ Requires udev | Need keyboard access | + +--- + +## 📈 Progress Tracking + +### Milestones + +- [x] **v0.1.0** - Core features (command palette, clipboard, snippets, AI) +- [x] **v0.1.1** - Settings system, themes, window management +- [ ] **v0.2.0** - Per-command hotkeys, downloads manager (ETA: 2 weeks) +- [ ] **v0.3.0** - Extension improvements, testing (ETA: 4 weeks) +- [ ] **v0.4.0** - Polish & optimization (ETA: 6 weeks) +- [ ] **v0.5.0** - Linux-exclusive features (macros, webhooks) (ETA: 3 months) +- [ ] **v1.0.0** - 90%+ Raycast parity + stable API (ETA: 6 months) + +### Raycast Feature Parity + +``` +[████████████████████] 80% +``` + +**Breakdown:** +- Core UI/UX: 95% +- Built-in Commands: 70% +- Extension System: 65% +- System Integration: 75% (was 60%) +- Performance: 85% +- Settings & Customization: 90% + +--- + +## 🏗️ Architecture Notes + +### Multi-Process Design + +1. **Tauri Backend (Rust)** - System integration, database, file I/O +2. **Sidecar (Node.js)** - JavaScript runtime for extensions +3. **UI (WebView)** - Svelte 5 frontend + +**Communication:** MessagePack IPC via `@flare/protocol` package + +### Key Technologies + +- **Frontend:** Svelte 5, SvelteKit 2, Tailwind CSS 4 +- **Backend:** Rust, Tauri 2, Tokio async runtime +- **Database:** SQLite (rusqlite) with AES-GCM encryption +- **Credentials:** System keyring (Linux native) +- **Calculator:** SoulverCore (Swift wrapper) +- **Window Mgmt:** X11 via `x11rb` crate + +--- + +## 📝 Changelog + +### 2025-12-24 (v0.1.1) +- Per-command hotkeys with SQLite storage and conflict detection (Dec 23) +- Comprehensive settings system with 6 tabs +- 9 professional themes with instant switching +- Close on blur functionality +- Auto-start on login (XDG autostart) +- Frecency timestamp bug fix +- Window edge visibility improvements +- Automated version management +- **Parity:** 70% → 80% + +### 2025-12-22 (v0.1.0) +- Fixed `usePersistentState` to actually persist +- Fixed React Reconciler stubs (no-op instead of throw) +- Fixed TcpListener port conflict crash +- Added database indices for performance +- Eliminated N+1 query in file indexer +- Moved CPU monitoring to background thread +- Replaced println!/eprintln! with structured logging +- **Parity:** 60% → 70% + +### 2025-12-21 +- Created comprehensive audit and TODO +- Identified critical gaps +- Prioritized roadmap + +### Earlier Work +- ✅ AI chat with multi-provider support (OpenRouter, Ollama) +- ✅ Snippet editing UI with terminal detection +- ✅ File search indexing and watching +- ✅ OAuth integration (GitHub) +- ✅ System monitors with real-time updates +- ✅ Window management (X11) + +--- + +## 🎯 Next Actions (This Week) + +1. **Testing infrastructure** - Add unit tests (highest priority) +2. **Menu Bar/System Tray** - Background indicator +3. **Performance profiling** - Optimize startup and search +4. **Wayland support** - Window management for Wayland + +--- + +**Legend:** +- 🔴 Critical priority (needed for Raycast replacement) +- 🟡 High priority (important but not blocking) +- 🟢 Medium/Low priority (nice to have) +- ✅ Complete +- 🟡 Partial +- ❌ Not started diff --git a/clean-builds.sh b/clean-builds.sh new file mode 100755 index 00000000..e9275cd7 --- /dev/null +++ b/clean-builds.sh @@ -0,0 +1,72 @@ +#!/bin/bash +# Flareup Build Cache Cleanup Script + +set -e + +cd ~/scratch/flareup + +echo "================================" +echo "Flareup Build Cache Cleanup" +echo "================================" +echo "" + +# Show current sizes +echo "Current build cache sizes:" +du -sh src-tauri/target/debug 2>/dev/null || echo " debug: not found" +du -sh src-tauri/target/release 2>/dev/null || echo " release: not found" +du -sh src-tauri/target/release-fast 2>/dev/null || echo " release-fast: not found" +echo "" +du -sh src-tauri/target 2>/dev/null +echo "" + +# Ask what to clean +echo "What would you like to clean?" +echo "" +echo "1) Debug builds only (21GB) - Recommended for active development" +echo "2) Release builds only (5.1GB + 4.5GB)" +echo "3) Everything (31GB) - Clean slate" +echo "4) Cancel" +echo "" +read -p "Choose option (1-4): " choice + +case $choice in + 1) + echo "" + echo "Cleaning debug builds (21GB)..." + rm -rf src-tauri/target/debug + echo "✓ Debug builds removed" + ;; + 2) + echo "" + echo "Cleaning release builds (9.6GB)..." + rm -rf src-tauri/target/release + rm -rf src-tauri/target/release-fast + echo "✓ Release builds removed" + ;; + 3) + echo "" + echo "Cleaning all builds (31GB)..." + # Using cargo clean is cleaner than rm -rf + cd src-tauri + cargo clean + cd .. + echo "✓ All builds removed" + ;; + 4) + echo "Cancelled" + exit 0 + ;; + *) + echo "Invalid option" + exit 1 + ;; +esac + +echo "" +echo "After cleanup:" +du -sh src-tauri/target 2>/dev/null || echo " target: cleaned completely" +echo "" +echo "To rebuild:" +echo " Development: cargo build (or just run the app)" +echo " Release: cargo build --release" +echo "" diff --git a/docs/EXTENSION_COMPATIBILITY.md b/docs/EXTENSION_COMPATIBILITY.md new file mode 100644 index 00000000..51e69595 --- /dev/null +++ b/docs/EXTENSION_COMPATIBILITY.md @@ -0,0 +1,128 @@ +# Extension Compatibility Layer + +## Overview + +The extension compatibility layer provides Linux equivalents for macOS-specific APIs commonly used in Raycast extensions. This allows many Raycast extensions to run on Flareup without modification. + +## Features + +### 1. Path Translation + +Automatically translates macOS paths to Linux equivalents: + +- `/Applications/` → `/usr/share/applications/` +- `/Library/` → `/usr/lib/` +- `/Users/` → `/home/` +- `~/Library/Application Support/` → `~/.local/share/` +- `~/Library/Preferences/` → `~/.config/` + +### 2. AppleScript Shim + +Provides Linux equivalents for common AppleScript commands: + +#### Supported Patterns + +- **Application Activation**: `tell application "AppName" to activate` + - Translated to: `gtk-launch` or `xdg-open` + +- **Application Quit**: `tell application "AppName" to quit` + - Translated to: `pkill -f appname` + +- **Notifications**: `display notification "message" with title "title"` + - Translated to: `notify-send "title" "message"` + +- **Volume Control**: `set volume N` + - Translated to: `pactl set-sink-volume @DEFAULT_SINK@ N%` or `amixer set Master N%` + +### 3. System Information + +Provides cross-platform system information: + +- Platform detection (always returns "linux") +- Architecture (x86_64, aarch64, etc.) +- Hostname +- Desktop environment (GNOME, KDE, etc.) + +## Usage in Extensions + +Extensions using macOS-specific APIs will automatically use the shims when running on Flareup: + +```typescript +import { runAppleScript } from "@raycast/api"; + +// This will work on Linux through the shim layer +await runAppleScript('display notification "Hello" with title "Flareup"'); +``` + +## API Reference + +### Rust Backend + +Located in `src-tauri/src/extension_shims.rs`: + +- `PathShim::translate_path(path: &str) -> String` +- `AppleScriptShim::run_apple_script(script: &str) -> ShimResult` +- `SystemShim::get_system_info() -> HashMap` + +### Tauri Commands + +- `shim_translate_path(path: String) -> String` +- `shim_run_applescript(script: String) -> ShimResult` +- `shim_get_system_info() -> HashMap` + +### TypeScript/Sidecar API + +Located in `sidecar/src/api/shims.ts`: + +```typescript +// Translate a macOS path to Linux equivalent +const linuxPath = await translatePath("/Applications/Safari.app"); + +// Run AppleScript with automatic translation +const result = await runAppleScript('tell application "Firefox" to activate'); + +// Get system information +const sysInfo = await getSystemInfo(); +``` + +## Limitations + +### Not Supported + +1. **Complex AppleScript**: Only common patterns are supported. Complex scripts with conditionals, loops, or custom handlers will not work. + +2. **Native Binaries**: Extensions that bundle macOS-specific binaries (Mach-O format) cannot be shimmed. Flareup will detect these at install time and warn you. + +3. **System-Specific Features**: Some macOS features have no Linux equivalent (e.g., specific Finder operations, macOS-only system preferences). + +### Partial Support + +1. **Application Launching**: Works for applications with desktop files. May not work for all applications. + +2. **Volume Control**: Requires PulseAudio/PipeWire or ALSA. May not work on all audio setups. + +## Extension Compatibility Heuristics + +When installing extensions, Flareup runs heuristic checks to detect potential incompatibilities: + +1. **Mach-O Binary Detection**: Warns if macOS-only executable files are found in the extension +2. **AppleScript Detection**: Warns if `runAppleScript` is used +3. **Path Detection**: Warns if hardcoded macOS paths are found + +Users are prompted to confirm installation if potential issues are detected. + +## Testing + +Run the Rust tests: + +```bash +cargo test --package flare --lib extension_shims::tests +``` + +## Future Enhancements + +- [ ] Support for more AppleScript patterns +- [ ] Automatic path rewriting in extension code +- [ ] Extension compatibility database +- [ ] Better error messages for unsupported features +- [ ] Fallback mechanisms for common operations diff --git a/docs/archive/AUDIT_REPORT.md b/docs/archive/AUDIT_REPORT.md new file mode 100644 index 00000000..1d8a2669 --- /dev/null +++ b/docs/archive/AUDIT_REPORT.md @@ -0,0 +1,980 @@ +# Flareup Comprehensive Audit Report +**Date:** 2025-12-21 +**Version:** 0.1.0 +**Goal:** Replace Raycast on Linux with similar or better functionality + +--- + +## Executive Summary + +Flareup is an **ambitious and well-architected** Tauri-based launcher for Linux attempting to replicate Raycast's functionality. The codebase demonstrates solid engineering principles with modern technologies (Rust, Svelte 5, SQLite) and impressive feature coverage for a v0.1.0 project. + +**Key Strengths:** +- Clean architecture with clear separation of concerns +- Excellent UI/UX with strong keyboard navigation and accessibility +- Comprehensive system integration (clipboard, snippets, AI, file search) +- Secure credential management via system keyring +- Active development with regular feature additions + +**Critical Gaps for Raycast Replacement:** +1. **No window management** (move, resize, snap windows) +2. **No system commands** (shutdown, sleep, volume control) +3. **Limited global hotkey support** (only app toggle, no per-command hotkeys) +4. **Incomplete extension compatibility** (macOS-centric API limitations) +5. **Performance bottlenecks** (database indexing, N+1 queries, blocking operations) + +**Overall Assessment:** Solid foundation with ~60% Raycast parity. Needs 3-6 months of focused development on critical missing features and performance optimization to be a viable replacement. + +--- + +## 1. Code Quality Analysis + +### 1.1 Bugs and Issues + +#### Critical +- **CommandPalette.svelte:95** - Debug console.log leftover: `console.log('null haha');` +- **17 Rust files** contain `.unwrap()` or `.expect()` calls that could panic in production +- **N+1 Query Problem** in file_search/indexer.rs - queries DB for every file during indexing + +#### High Priority +- **Noisy logging** - lib.rs logs every global shortcut event (pressed/released) to stdout +- **Blocking operations** in async contexts (ai.rs database calls block async runtime) +- **Hardcoded WebSocket port** (7265) in browser_extension.rs could cause conflicts +- **Missing database indices** on frequently queried columns (created, updated_at, content_type) + +#### Medium Priority +- Multiple TODO comments in TypeScript/Svelte files (none in Rust): + - `assets.ts:32,52,57` - "TODO: better heuristic?" + - `CommandDeeplinkConfirm.svelte:33` - "TODO: implement 'always open'" + - `nodes/shared/actions.ts:7` - "TODO: naming?" + - `sidecar/src/api/cache.ts:37` - Unclear fix comment needing documentation +- Debug console.log statements in 7+ files +- Commit message indicates snippet work "not 100 percent happy" (c052e1a) + +### 1.2 Code Smells + +```rust +// sidecar/src/api/cache.ts:37 +// no idea what this does but it fixes the bug of "cannot read property subscribe of undefined" +``` + +**Recommended Actions:** +1. Remove all debug console.log statements +2. Replace `.unwrap()` with proper error handling using `?` operator or `match` +3. Add database indices for performance +4. Move blocking operations to `tokio::task::spawn_blocking` +5. Add proper logging framework instead of println! (use `tracing` crate) + +--- + +## 2. Feature Completeness vs Raycast + +### 2.1 Implemented Features ✅ + +| Feature | Status | Quality | Notes | +|---------|--------|---------|-------| +| Command Palette | ✅ Implemented | High | Fuzzy search, frecency ranking | +| Calculator | ✅ Implemented | High | SoulverCore integration | +| Clipboard History | ✅ Implemented | High | Text, images, colors, encryption | +| Snippets/Text Expansion | ✅ Implemented | Medium | Rich placeholders, terminal detection WIP | +| AI Integration | ✅ Implemented | High | Multi-provider, conversation history | +| File Search | ✅ Implemented | Medium | Custom indexing, limited scope | +| Extensions API | 🟡 Partial | Medium | Basic compatibility, macOS limitations | +| System Monitors | ✅ Implemented | High | CPU, memory, disk, battery | +| Quick Toggles | 🟡 Partial | Medium | WiFi, Bluetooth, Dark Mode | +| GitHub Integration | ✅ Implemented | Medium | OAuth, basic API support | + +### 2.2 Missing Critical Features ❌ + +| Feature | Priority | Impact | Complexity | +|---------|----------|--------|------------| +| **Window Management** | 🔴 Critical | High | High - requires X11/Wayland APIs | +| **System Commands** | 🔴 Critical | High | Medium - systemctl, amixer, loginctl | +| **Global Hotkeys (per-command)** | 🔴 Critical | High | Medium - extend existing system | +| **Menu Bar Extra** | 🟡 High | Medium | Medium - system tray integration | +| **Fallback Commands** | 🟡 High | Low | Low - config system exists | +| **Extension Hot Reload** | 🟡 High | Medium | Medium - file watcher needed | +| **Trash Management** | 🟢 Medium | Low | Low - shell integration | +| **Scheduled Actions** | 🟢 Medium | Medium | High - cron-like scheduler | +| **Webhooks/Remote Triggers** | 🟢 Low | Medium | High - HTTP server needed | + +### 2.3 Feature Details + +#### Window Management (CRITICAL MISSING) +**Current State:** None +**Raycast Features:** +- Move window to left/right half, center, corners +- Resize to specific dimensions +- Move to next/previous desktop +- Maximize, minimize, fullscreen + +**Implementation Path:** +```rust +// For X11 +use x11rb::protocol::xproto::*; + +// For Wayland +use wayland_client::*; + +// Create new module: src-tauri/src/window_manager.rs +#[tauri::command] +async fn move_window_to_half(direction: String) -> Result<(), String> { + // Detect if X11 or Wayland + // Use appropriate API to manipulate active window +} +``` + +**Recommended Tools:** +- X11: `wmctrl`, `xdotool`, or direct x11rb API +- Wayland: compositor-specific protocols (sway IPC, KWin D-Bus) + +#### System Commands (CRITICAL MISSING) +**Current State:** System monitors only (read-only) +**Needed Commands:** + +```rust +// src-tauri/src/system_commands.rs +#[tauri::command] +async fn shutdown() -> Result<(), String> { + Command::new("systemctl").args(["poweroff"]).spawn()?; + Ok(()) +} + +#[tauri::command] +async fn set_volume(level: u8) -> Result<(), String> { + // Use pactl or amixer + Command::new("pactl") + .args(["set-sink-volume", "@DEFAULT_SINK@", &format!("{}%", level)]) + .spawn()?; + Ok(()) +} +``` + +**Missing Commands:** +- Sleep (`systemctl suspend`) +- Restart (`systemctl reboot`) +- Lock Screen (`loginctl lock-session`) +- Volume Up/Down/Mute (`pactl` or `amixer`) +- Empty Trash (`rm -rf ~/.local/share/Trash/*`) +- Eject drives (`udisksctl unmount -b /dev/sdX`) + +#### Global Hotkeys (CRITICAL MISSING) +**Current State:** Single hotkey (Super+Alt+Space) to toggle app +**Needed:** Per-command hotkey binding + +**Implementation:** +```rust +// Extend lib.rs global shortcut system +let mut hotkey_manager = state.hotkey_manager.lock().unwrap(); + +// Allow users to register custom shortcuts +hotkey_manager.register("Cmd+Shift+C", "clipboard_history")?; +hotkey_manager.register("Cmd+Shift+S", "snippets")?; + +// On hotkey trigger, emit event to frontend with command ID +``` + +--- + +## 3. Performance Optimization Opportunities + +### 3.1 Database Performance + +#### Missing Indices (High Impact) +```sql +-- ai.rs +CREATE INDEX IF NOT EXISTS idx_ai_generations_created ON ai_generations(created); +CREATE INDEX IF NOT EXISTS idx_ai_conversations_updated ON ai_conversations(updated_at); + +-- clipboard_history/manager.rs +CREATE INDEX IF NOT EXISTS idx_clipboard_content_type ON clipboard_history(content_type); +CREATE INDEX IF NOT EXISTS idx_clipboard_pinned ON clipboard_history(is_pinned); +CREATE INDEX IF NOT EXISTS idx_clipboard_last_copied ON clipboard_history(last_copied_at); + +-- snippets/manager.rs +CREATE INDEX IF NOT EXISTS idx_snippets_keyword ON snippets(keyword); +``` + +**Expected Impact:** 5-10x faster queries on large datasets (>1000 items) + +#### N+1 Query Problem (Critical) +**File:** `file_search/indexer.rs:build_initial_index()` + +**Current (slow):** +```rust +for entry in walker { + if let Ok(Some(indexed_time)) = manager.get_file_last_modified(&path) { + // Individual SELECT for each file + } +} +``` + +**Optimized:** +```rust +// Load all file timestamps into HashMap once +let existing_files = manager.get_all_file_timestamps()?; + +for entry in walker { + if let Some(indexed_time) = existing_files.get(&path) { + // In-memory lookup (instant) + } +} +``` + +**Expected Impact:** 100x faster initial indexing for large file systems + +#### Full-Text Search for Snippets +**Current:** `LIKE %...%` forces full table scan +**Recommended:** Implement SQLite FTS5 + +```sql +CREATE VIRTUAL TABLE snippets_fts USING fts5( + keyword, + content, + content=snippets, + content_rowid=id +); +``` + +### 3.2 Memory & Caching + +#### Coarse-Grained App Cache (Medium Impact) +**File:** `cache.rs:is_stale()` + +**Issue:** Invalidates entire app cache if ANY .desktop file changes +**Fix:** Track modification times per-file, only re-parse changed files + +```rust +// Store: HashMap +// Only invalidate and re-parse files with newer timestamps +``` + +**Expected Impact:** 10x faster app cache updates + +#### Batch Database Operations (High Impact) +**File:** `file_search/manager.rs:add_file()` + +**Current:** Single INSERT per file during indexing +**Recommended:** Batch inserts in transactions + +```rust +let tx = conn.transaction()?; +for file in files { + tx.execute("INSERT INTO ...", params![])?; +} +tx.commit()?; +``` + +**Expected Impact:** 50x faster bulk indexing + +### 3.3 Blocking Operations + +#### CPU Monitor Blocking Sleep (High Priority) +**File:** `system_monitors.rs:get_cpu_info()` + +```rust +// Current: blocks thread pool worker +std::thread::sleep(Duration::from_millis(200)); + +// Recommended: background thread with cached state +static CPU_INFO: Lazy>> = Lazy::new(|| { + // Spawn background thread that updates every 200ms + // Commands return cached value instantly +}); +``` + +#### Shell Command Overhead (Medium Priority) +**File:** `quick_toggles.rs` + +**Current:** Spawns `nmcli`, `rfkill` processes on every call +**Recommended:** Use native D-Bus bindings + +```rust +// Replace shell calls with: +use zbus::Connection; + +async fn get_wifi_state() -> Result { + let conn = Connection::system().await?; + let proxy = NetworkManagerProxy::new(&conn).await?; + Ok(proxy.wireless_enabled().await?) +} +``` + +**Expected Impact:** 10x faster state queries + +### 3.4 Startup Time + +#### Sequential Database Initialization (Medium Impact) +**File:** `lib.rs:setup()` + +**Current:** Sequential initialization of 5 managers +**Recommended:** Parallel initialization + +```rust +use rayon::prelude::*; + +let managers = vec![ + spawn(|| AiUsageManager::new(app_handle.clone())), + spawn(|| QuicklinkManager::new(app_handle.clone())), + // ... etc +]; + +let results: Vec<_> = managers.into_par_iter() + .map(|t| t.join().unwrap()) + .collect(); +``` + +**Expected Impact:** 2-3x faster startup on multi-core systems + +**Alternative:** Lazy initialization on first access +```rust +// Only initialize when actually needed +static AI_MANAGER: OnceCell = OnceCell::new(); +``` + +--- + +## 4. UI/UX Analysis + +### 4.1 Strengths +- **Keyboard Navigation:** Comprehensive, all views fully keyboard accessible +- **Focus Management:** Excellent with dedicated `focusManager` system +- **Loading States:** Consistent loading indicators and spinners +- **Empty States:** Helpful guidance when views are empty +- **Error Handling:** Toast notifications for all async operations +- **Design Consistency:** Strict adherence to design system via Bits UI + +### 4.2 Accessibility +**Rating: High (8/10)** + +**Pros:** +- Complete keyboard control +- Bits UI primitives handle ARIA automatically +- Focus trap prevention +- Semantic HTML structure + +**Recommendations:** +1. Verify `BaseList.svelte` sets `role="listbox"` and `role="option"` for screen readers +2. Add `aria-live` regions for dynamic content updates (AI streaming) +3. Test with screen readers (Orca on Linux) +4. Ensure color contrast meets WCAG AA standards (especially `text-muted-foreground`) + +### 4.3 Responsive Design +**Rating: Medium-High (Desktop Optimized)** + +Appropriately optimized for fixed-size desktop window. Not mobile-responsive, which is correct for this use case. + +### 4.4 User Feedback +**Rating: High (9/10)** + +- Comprehensive toast notifications +- Inline form validation +- Confirmation dialogs for dangerous actions +- Clear error messages + +**Minor Issue:** Some errors only log to console (extension store errors) + +--- + +## 5. Architecture & Code Structure + +### 5.1 Strengths +- **Clean separation:** Frontend (Svelte) / Backend (Rust) / Sidecar (Node.js) +- **Modern stack:** Svelte 5 runes, Tauri 2.x, async Rust +- **Modular design:** Each feature is a separate module +- **Type safety:** TypeScript + Rust ensures compile-time checks +- **Security:** Proper credential storage via system keyring + +### 5.2 Areas for Improvement + +#### Large Modules +- **ai.rs:** 726 lines - should split into ai/mod.rs, ai/client.rs, ai/storage.rs +- **extensions.rs:** 631 lines - split into extensions/loader.rs, extensions/compatibility.rs +- **lib.rs:** 661 lines - extract window management, hotkey system into modules + +#### Error Handling +**17 files** use `.unwrap()` or `.expect()` which can panic: +- snippets/input_manager.rs +- snippets/manager.rs +- file_search/manager.rs +- clipboard_history/manager.rs +- And 13 more... + +**Recommended Pattern:** +```rust +// Replace +let value = risky_operation().unwrap(); + +// With +let value = risky_operation() + .map_err(|e| format!("Failed to X: {}", e))?; +``` + +#### Logging +**Current:** Mix of `println!`, `eprintln!`, `console.log`, and no structured logging +- `println!` used for info/status messages (~15 occurrences) +- `eprintln!` used for error logging (~45 occurrences) - goes to stderr, slightly better +- `console.log` in frontend (~10 occurrences, some debug leftovers) + +**Recommended:** Implement `tracing` crate for Rust backend + +```rust +use tracing::{info, error, debug, warn}; + +// Instead of println!/eprintln! +info!("Starting file index build"); +debug!("Indexed {} files", count); +warn!("Directory not found: {}", path); +error!("Failed to index {}: {}", path, err); +``` + +**Benefits of tracing:** +- Structured logging with spans and events +- Configurable log levels at runtime +- Integration with log aggregation tools +- Better performance than println! + +--- + +## 6. Security Considerations + +### 6.1 Good Practices ✅ +- System keyring for API keys (not plaintext) +- Input validation for snippet placeholders +- Extension compatibility checking before installation +- Proper error handling prevents exposing sensitive data + +### 6.2 Potential Concerns ⚠️ + +#### Global Keyboard Interception +**File:** `snippets/input_manager.rs` + +Requires elevated permissions (udev rules) to read `/dev/input/eventX`. This is necessary for snippets but could be a security vector if compromised. + +**Mitigation:** Already documented in README. Consider adding runtime permission checks. + +#### System Command Execution +**File:** `quick_toggles.rs`, planned `system_commands.rs` + +Executes `nmcli`, `rfkill`, future `systemctl` commands. Ensure no user input is passed unsanitized. + +**Current:** Safe (no user input in shell commands) +**Future:** Validate any dynamic parameters + +#### Extension Loading +**File:** `extensions.rs` + +Loads and executes code from external sources (Raycast store). + +**Current Mitigation:** +- Heuristic checks for macOS-only APIs +- Sandboxed Node.js sidecar process +- No native code execution + +**Recommendation:** Consider allowlist/blocklist of known safe extensions + +--- + +## 7. Platform-Specific Challenges + +### 7.1 Linux Desktop Fragmentation + +#### Wayland vs X11 +**Current:** Detects session type, uses appropriate APIs +**Challenge:** Wayland support incomplete for: +- Global hotkeys (works via evdev) +- Window manipulation (compositor-dependent) +- Selected text access (X11-only currently) + +**Recommendation:** +1. Add Wayland compositor detection (Sway, GNOME, KDE) +2. Implement compositor-specific protocols: + - Sway: IPC socket + - GNOME: D-Bus extensions + - KDE: KWin scripts + +#### Terminal Detection +**File:** `snippets/input_manager.rs:123-162` + +Hardcoded list of 40+ terminal emulators. Brittle and requires constant updates. + +**Current:** +```rust +const TERMINAL_EMULATORS: &[&str] = &[ + "gnome-terminal", "konsole", "alacritty", /* ... 37 more */ +]; +``` + +**Recommended Approach:** +```rust +// Check if process is a TTY +fn is_terminal_window(class: &str) -> bool { + // 1. Check against known list (fast path) + // 2. Check if WM_CLASS contains "term" (heuristic) + // 3. Query process for TTY file descriptor + class.to_lowercase().contains("term") || + TERMINAL_EMULATORS.contains(&class) +} +``` + +### 7.2 Desktop Environment Support + +**Tested:** GNOME, KDE/Plasma (via D-Bus) +**Unknown:** Cinnamon, MATE, XFCE, i3, Sway, Hyprland + +**Recommendation:** Add detection and fallback scripts for: +- Dark mode toggle +- System notifications +- Tray icon support + +--- + +## 8. Extension System Analysis + +### 8.1 Current State + +**Architecture:** +``` +Flareup (Tauri) + ↓ MessagePack IPC +Node.js Sidecar (React Reconciler) + ↓ Imports +Raycast Extension (JavaScript/TypeScript) +``` + +**Compatibility Layer:** +- Path translation: `/Applications/` → `/usr/share/applications/` +- AppleScript shimming (basic pattern matching) +- Mock implementations of macOS-only APIs + +### 8.2 Limitations + +#### Fundamental Incompatibility +**Issue:** Raycast extensions are macOS-centric by design + +**Blocked Features:** +- Native Swift bindings (can't run on Linux) +- AppleScript (no Linux equivalent) +- macOS-specific paths and APIs +- Spotlight integration +- Finder operations + +**Success Rate Estimate:** +- Simple extensions (web APIs, HTTP): ~80% compatible +- Medium complexity (file operations): ~50% compatible +- macOS-dependent (system control): ~10% compatible + +### 8.3 Recommendations + +#### Short-term +1. Improve compatibility detection (currently heuristic-based) +2. Add explicit extension compatibility ratings in UI +3. Create Linux-specific extension guidelines + +#### Long-term +1. Fork popular extensions to create Linux versions +2. Build native Flareup extension API (not Raycast-compatible) +3. Create extension converter tool (Raycast → Flareup) + +**Example Native API:** +```typescript +// flareup-sdk +import { Flareup } from '@flareup/api'; + +export default function Command() { + return ( + + + + + } + /> + + ); +} +``` + +--- + +## 9. Testing & Quality Assurance + +### 9.1 Current Testing State +**Analysis:** Frontend testing infrastructure exists with good coverage for key components + +**Existing Test Files:** +- `src/lib/components/Extensions.svelte.test.ts` (293 lines) - Comprehensive tests for extension store +- `src/lib/components/command-palette/CommandPalette.svelte.test.ts` (472 lines) - Full coverage of command palette + +**Testing Stack Already Configured:** +- vitest (test runner) +- @testing-library/svelte (component testing) +- @testing-library/jest-dom (DOM assertions) +- @testing-library/user-event (user interaction simulation) +- playwright (E2E testing - configured but no tests yet) +- jsdom (DOM environment) + +**Gaps in Test Coverage:** +- **Rust backend:** 0 test coverage (critical gap) +- **Sidecar:** No tests for Node.js extension host +- **Integration:** No Tauri <-> sidecar IPC tests +- **E2E:** Playwright configured but no test files + +### 9.2 Recommended Test Expansion + +#### Rust Unit Tests (High Priority - Currently Missing) +```rust +// src-tauri/src/snippets/engine_test.rs +#[cfg(test)] +mod tests { + #[test] + fn test_placeholder_expansion() { + let result = expand_placeholder("{clipboard}", context); + assert_eq!(result, "expected_value"); + } + + #[test] + fn test_date_formatting() { + let result = expand_date_placeholder("YYYY-MM-DD"); + // Assert format is correct + } +} +``` + +**Critical Areas Needing Rust Tests:** +- Snippet placeholder expansion (`snippets/engine.rs`) +- Path translation (`extension_shims.rs`) +- Frecency scoring (`frecency.rs`) +- Calculator integration (`soulver.rs`) + +#### Integration Tests (Medium Priority) +- Extension loading and execution +- Database migrations +- IPC communication between Tauri and sidecar + +#### E2E Tests (Low Priority) +- Full user workflows using existing Playwright setup +- Keyboard navigation +- Extension installation + +### 9.3 CI Pipeline Status + +**Existing CI:** `.github/workflows/nightly.yml` +- Builds AppImage on schedule (daily at 23:15 UTC) +- Handles Swift wrapper compilation +- Caches Rust dependencies +- Supports debug/release builds + +**Missing from CI:** +- Test execution (`cargo test`, `pnpm test:unit`) +- Linting (`cargo clippy`) +- Format checking (`cargo fmt --check`) +- PR-triggered builds (currently only nightly + manual) + +**Recommended CI Enhancement:** +```yaml +# Add to nightly.yml or create new pr.yml +- name: Run Rust tests + run: cargo test --all-features + +- name: Run frontend tests + run: pnpm test:unit + +- name: Run clippy + run: cargo clippy -- -D warnings + +- name: Check formatting + run: cargo fmt -- --check +``` + +### 9.4 Quality Metrics + +**Recommended Additional Tools:** +```toml +# Cargo.toml +[dev-dependencies] +criterion = "0.5" # Benchmarking +proptest = "1.0" # Property-based testing +mockall = "0.12" # Mocking + +[lints.rust] +unsafe_code = "forbid" +missing_docs = "warn" +``` + +--- + +## 10. Dependency Analysis + +### 10.1 Rust Dependencies + +**Heavy Dependencies (Potential Optimization):** +- `sysinfo` (221 KB) - system monitoring +- `tokio` (full features) - consider feature flags +- `reqwest` (full features) - only need basic HTTP + +**Recommended:** +```toml +# Instead of +reqwest = { version = "0.11", features = ["json", "cookies", ...] } + +# Use +reqwest = { version = "0.11", default-features = false, features = ["json", "rustls-tls"] } +``` + +### 10.2 JavaScript Dependencies + +**Bundle Size Analysis Recommended:** +```bash +pnpm install -g source-map-explorer +pnpm build +source-map-explorer dist/**/*.js +``` + +**Potential Optimizations:** +- Code splitting for extensions view +- Lazy load settings view +- Tree-shake unused Bits UI components + +--- + +## 11. Documentation Gaps + +### 11.1 Missing Documentation + +**User Documentation:** +- [ ] Quickstart guide +- [ ] Keyboard shortcuts reference +- [ ] Extension compatibility list +- [ ] Troubleshooting guide + +**Developer Documentation:** +- [ ] Architecture overview +- [ ] Contributing guidelines +- [ ] Extension development guide +- [ ] API documentation (rustdoc) + +**Operational:** +- [ ] Performance tuning guide +- [ ] Database migration guide +- [ ] Backup/restore procedures + +### 11.2 Recommended Structure + +``` +docs/ +├── user-guide/ +│ ├── installation.md +│ ├── features/ +│ │ ├── snippets.md +│ │ ├── clipboard.md +│ │ └── ai-chat.md +│ └── troubleshooting.md +├── developer/ +│ ├── architecture.md +│ ├── building.md +│ └── contributing.md +└── api/ + ├── rust/ # Generated via cargo doc + └── extensions/ # Extension API reference +``` + +--- + +## 12. Prioritized Recommendations + +### 12.1 Critical (Do First) 🔴 + +1. **Remove Debug Code** (1 hour) + - Remove `console.log('null haha')` from CommandPalette.svelte:95 + - Remove all debug console.log statements + - Replace println! with proper logging + +2. **Fix Performance Bottlenecks** (1 day) + - Add database indices (ai, clipboard, snippets tables) + - Fix N+1 query in file_search/indexer.rs + - Batch database operations in transactions + +3. **Implement Window Management** (2 weeks) + - X11 support first (wmctrl or x11rb) + - Wayland support (compositor-specific) + - Commands: move to half, center, maximize, next desktop + +4. **Add System Commands** (1 week) + - Sleep, restart, shutdown, lock + - Volume control + - Empty trash + - Eject drives + +5. **Global Hotkeys for Commands** (1 week) + - Extend existing hotkey system + - Per-command keybinding configuration + - Settings UI for hotkey management + +### 12.2 High Priority (Next Phase) 🟡 + +6. **Error Handling Audit** (3 days) + - Replace `.unwrap()` with proper error handling + - Add context to errors + - Implement tracing for structured logging + +7. **Performance Optimization** (1 week) + - CPU monitor background thread + - Replace shell commands with native D-Bus + - Parallel database initialization + - Implement FTS5 for snippet search + +8. **Extension Compatibility** (2 weeks) + - Improve detection heuristics + - Add compatibility ratings in UI + - Create Linux-specific extension guidelines + - Fork and adapt top 10 popular extensions + +9. **Testing Infrastructure** (1 week) + - Unit tests for critical modules + - Integration tests for IPC + - CI pipeline with automated testing + +10. **Wayland Improvements** (1 week) + - Compositor detection + - Sway IPC integration + - GNOME/KDE D-Bus extensions + - Selected text access on Wayland + +### 12.3 Medium Priority (Future Releases) 🟢 + +11. **Module Refactoring** (3 days) + - Split ai.rs into submodules + - Split extensions.rs into loader/compatibility + - Extract hotkey system from lib.rs + +12. **Terminal Detection Improvements** (2 days) + - Heuristic-based fallback + - Process TTY detection + - User override settings + +13. **Documentation** (1 week) + - User guide + - Developer documentation + - API documentation (rustdoc) + - Extension development guide + +14. **UI/UX Polish** (1 week) + - ARIA improvements for screen readers + - Keyboard trap prevention audit + - Color contrast verification + - Animation/transition polish + +15. **Feature Completeness** (2 weeks) + - Menu Bar Extra / Tray Icon + - Fallback commands configuration + - Extension hot reload + - Trash management commands + +### 12.4 Low Priority (Nice to Have) 🔵 + +16. **Advanced Features** (4+ weeks) + - Keyboard Maestro-like macros + - Scheduled actions/automations + - Webhooks and remote triggers + - Headless/background extensions + - File actions/contextual actions + - Chained commands/pipes + +17. **Optimization** (Ongoing) + - Bundle size reduction + - Code splitting + - Lazy loading for settings + - Memory usage profiling + +--- + +## 13. Estimated Timeline + +### Phase 1: Core Stability (2-3 weeks) +- Fix debug code and logging +- Performance optimizations +- Error handling improvements +- Basic testing infrastructure + +### Phase 2: Raycast Parity (4-6 weeks) +- Window management +- System commands +- Global hotkeys +- Extension improvements + +### Phase 3: Polish & Performance (2-3 weeks) +- Wayland support improvements +- UI/UX refinements +- Documentation +- Testing coverage + +### Phase 4: Advanced Features (8-12 weeks) +- Menu bar extra +- Advanced automation +- Native extension API +- Community extensions + +**Total Estimated Time to Viable Raycast Replacement:** 3-6 months of focused development + +--- + +## 14. Resource Requirements + +### Development Team +- **1 Senior Rust Developer** (backend, system integration) +- **1 Frontend Developer** (Svelte, UI/UX) +- **1 Linux Systems Expert** (X11/Wayland, desktop environments) +- **Optional: 1 Technical Writer** (documentation) + +### Infrastructure +- CI/CD pipeline (GitHub Actions) +- Test machines covering: + - X11 (Ubuntu, Fedora) + - Wayland (GNOME, KDE, Sway) + - Various desktop environments +- Performance monitoring tools + +--- + +## 15. Conclusion + +Flareup has a **solid foundation** and demonstrates impressive engineering for a v0.1.0 project. The architecture is sound, the codebase is well-structured, and many features are already implemented with high quality. + +**Key Achievements:** +- Excellent UI/UX and accessibility +- Comprehensive system integration +- Secure credential management +- Modern, maintainable codebase + +**Path to Success:** +To become a viable Raycast replacement, focus on: +1. **Critical missing features** (window management, system commands, global hotkeys) +2. **Performance optimization** (database indexing, query optimization) +3. **Code quality** (remove debug code, improve error handling, add tests) +4. **Platform support** (Wayland improvements, desktop environment compatibility) + +With 3-6 months of focused development following the prioritized recommendations above, Flareup could achieve **feature parity with Raycast** and potentially exceed it with Linux-specific optimizations and native integrations. + +**Recommended Next Steps:** +1. Review this audit with the team +2. Create GitHub issues for each recommendation +3. Set up project roadmap with milestones +4. Begin with Phase 1 (Core Stability) items +5. Engage community for extension development and testing + +--- + +**Audit Conducted By:** Claude Sonnet 4.5 +**Reviewed By:** Claude Opus 4.5 +**Date:** 2025-12-21 +**Last Updated:** 2025-12-21 + +**Review Notes (Opus 4.5):** +- Corrected testing section: Frontend tests exist (Extensions.svelte.test.ts, CommandPalette.svelte.test.ts) +- Corrected CI section: nightly.yml exists, needs test steps added +- Clarified TODO locations: TypeScript/Svelte only, no TODOs in Rust +- Added eprintln! count (~45 occurrences) to logging analysis +- Confidence Level: High (verified through additional file inspection) diff --git a/docs/archive/CLAUDE_REVIEW_2025-12-22.md b/docs/archive/CLAUDE_REVIEW_2025-12-22.md new file mode 100644 index 00000000..af39afc1 --- /dev/null +++ b/docs/archive/CLAUDE_REVIEW_2025-12-22.md @@ -0,0 +1,319 @@ +# Flare Gap Analysis & Review +**Date:** 2025-12-22 +**Reviewer:** Claude Opus 4.5 +**Focus:** Extensions compatibility, Downloads Manager, overall gaps +**Last Updated:** 2025-12-22 (post-fixes review) + +--- + +## Executive Summary + +Flare is approximately **70% feature-complete** compared to Raycast (up from 60% after recent fixes). + +### ✅ Recently Fixed (This Branch) +- React Reconciler stubs now work (no more crashes) +- `usePersistentState` actually persists data +- Database indices added for performance +- N+1 query fixed in file indexer +- TcpListener port crash fixed +- Structured logging via tracing +- CPU monitor runs in background thread + +### Remaining Pain Points +1. **Extensions**: AppleScript shims still limited, some APIs missing +2. **Downloads Manager**: Does not exist +3. **System Integration**: Window management, system commands, per-command hotkeys missing +4. **Code Quality**: Some unsafe `.unwrap()` calls remain + +--- + +## 1. Extensions: Why Many Don't Work + +### 1.1 ~~Critical: React Reconciler Stubs~~ ✅ FIXED + +**Location:** `sidecar/src/hostConfig.ts:383-412` + +~~10 React Reconciler methods throw "Function not implemented" errors instead of being no-ops.~~ + +**Status:** All 10 methods now return safe no-op values (void, false, null, Date.now()). + +--- + +### 1.2 ~~Critical: usePersistentState is Fake~~ ✅ FIXED + +**Location:** `sidecar/src/api/index.ts:97-139` + +**Status:** Now properly persists to LocalStorage with: +- `useEffect` to load on mount +- `isLoading` state for async load tracking +- `useCallback` memoized setter that persists on every change +- Proper JSON parse/stringify with error handling + +--- + +### 1.3 Important: AppleScript Shim is Minimal + +**Location:** `src-tauri/src/extension_shims.rs:80-114` + +Only 4 AppleScript patterns are supported: + +| Pattern | Linux Equivalent | +|---------|-----------------| +| `tell application "X" to activate` | `gtk-launch` / `xdg-open` | +| `tell application "X" to quit` | `pkill -f` | +| `display notification` | `notify-send` | +| `set volume N` | `pactl` / `amixer` | + +Everything else returns: +``` +"AppleScript not supported on Linux. Script: {script}" +``` + +**Common unsupported operations:** +- `tell application "System Events"` (keystroke simulation) +- `do shell script` (should map to child_process) +- `tell application "Finder"` (file operations) +- `tell application "Safari"` (browser control) +- Property access (`get name of application`) + +--- + +### 1.4 Important: Missing/Incomplete APIs + +| Raycast API | Status | Location | +|-------------|--------|----------| +| `Clipboard.copy/paste` | ✅ Works | `sidecar/src/api/clipboard.ts` | +| `Clipboard.read (HTML)` | ❌ Not supported | `src-tauri/src/clipboard.rs:42` | +| `LocalStorage` | ✅ Works | `sidecar/src/api/utils.ts` | +| `Cache` | ✅ Works | `sidecar/src/api/cache.ts` | +| `usePersistentState` | ❌ Stub only | `sidecar/src/api/index.ts:97` | +| `runAppleScript` | ⚠️ 4 patterns only | `src-tauri/src/extension_shims.rs` | +| `BrowserExtension` | ⚠️ CSS only, no JS eval | `sidecar/src/api/browserExtension.ts` | +| `getSelectedFinderItems` | ✅ Works (Linux equiv) | `sidecar/src/api/environment.ts` | +| `getSelectedText` | ✅ Works | `sidecar/src/api/environment.ts` | +| `showInFinder` | ✅ Works (xdg-open) | `sidecar/src/api/environment.ts` | +| `trash` | ✅ Works | `sidecar/src/api/environment.ts` | +| `OAuth` | ⚠️ Works but unclear packageName | `sidecar/src/api/oauth.ts:151` | +| `AI.ask` | ✅ Works | `sidecar/src/api/ai.ts` | + +--- + +### 1.5 Path Translation Gaps + +**Location:** `src-tauri/src/extension_shims.rs:17-74` + +Path translation exists but is incomplete: + +| macOS Path | Translated To | +|------------|--------------| +| `/Applications/X.app` | `/usr/share/applications/x.desktop` | +| `/Library/` | `/usr/lib/` | +| `~/Library/Application Support/` | `~/.local/share/` | +| `~/Library/Preferences/` | `~/.config/` | +| `/Users/` | `/home/` | + +**Problem:** Many extensions hardcode paths without using Raycast APIs, so translation never happens. + +--- + +### 1.6 Extension Compatibility Estimate + +| Category | % Working | Notes | +|----------|-----------|-------| +| Pure UI (lists, forms, details) | 90% | Most work fine | +| Clipboard-based | 80% | HTML not supported | +| HTTP/API extensions | 95% | Work well | +| AppleScript automation | 10% | Only basic commands | +| Native binary bundled | 0% | macOS binaries fail | +| System Events | 5% | Almost nothing works | +| Browser control | 20% | CSS queries only | + +--- + +## 2. Downloads Manager: Does Not Exist + +### Current State + +The file indexer watches `~/Downloads` (`src-tauri/src/file_search/indexer.rs:20`), but this is only for **file search**, not download management. + +### What's Missing + +| Feature | Status | +|---------|--------| +| Download progress tracking | ❌ Not implemented | +| Download pause/resume/cancel | ❌ Not implemented | +| Download history | ❌ Not implemented | +| Downloads UI view | ❌ Not implemented | +| Browser integration | ❌ Not implemented | +| Download notifications | ❌ Not implemented | + +### Recommended Implementation + +1. Create `src-tauri/src/downloads/` module: + - `manager.rs` - Track active downloads + - `history.rs` - SQLite storage for download history + - `monitor.rs` - Watch ~/Downloads for new files + +2. Create UI in `src/lib/components/DownloadsView.svelte` + +3. Add commands: + - `list_downloads` - Get download history + - `open_download` - Open file/folder + - `clear_download_history` - Clean up + +--- + +## 3. Other Major Gaps + +### 3.1 Critical Missing Features + +| Feature | Priority | Effort | Notes | +|---------|----------|--------|-------| +| Window Management | Critical | 2 weeks | X11 via x11rb, Wayland per-compositor | +| System Commands | Critical | 1 week | shutdown, restart, sleep, lock | +| Per-Command Hotkeys | Critical | 1 week | Currently only global app toggle | +| System Tray | High | 3 days | No background indicator | + +### 3.2 Code Quality Issues + +#### Unsafe `.unwrap()` Calls (32+ instances) + +**High-risk locations:** + +| File | Risk | Issue | +|------|------|-------| +| `browser_extension.rs:170` | **Critical** | `TcpListener::bind().expect()` - crashes if port taken | +| `soulver.rs:10` | High | `CString::new().expect()` - crashes on invalid path | +| `snippets/engine.rs:22-28` | Medium | `Regex::new().unwrap()` - unlikely to fail | +| `snippets/manager.rs` | Medium | Many unwraps in tests | + +**Fix:** Replace with `?` operator or `match` statements. + +#### ~~TcpListener Port Binding~~ ✅ FIXED + +**Location:** `src-tauri/src/browser_extension.rs:170` + +**Status:** Now uses proper `match` with `tracing::error!` and graceful return instead of crashing. + +--- + +## 4. TODO Comments in Codebase + +### TypeScript/Svelte TODOs + +| Location | Comment | Priority | +|----------|---------|----------| +| `src/lib/assets.ts:44` | `// TODO: better heuristic?` | Low | +| `src/lib/assets.ts:68` | `// TODO: better heuristic?` | Low | +| `src/lib/assets.ts:74` | `// TODO: actually handle adjustContrast` | Low | +| `src/lib/components/CommandDeeplinkConfirm.svelte:39` | `` | Medium | +| `src/lib/components/nodes/shared/actions.ts:8` | `// TODO: naming?` | Low | +| `sidecar/src/api/oauth.ts:151` | `// TODO: what does this mean?` (packageName) | Medium | + +### Rust TODOs + +No TODO comments found in Rust code. + +--- + +## 5. Performance Issues + +### 5.1 ~~N+1 Query in File Indexer~~ ✅ FIXED + +**Location:** `src-tauri/src/file_search/indexer.rs` + +**Status:** Fixed in commit `55a7bd0`. Now uses batch query with HashMap lookup. + +### 5.2 ~~Missing Database Indices~~ ✅ FIXED + +**Status:** All 6 indices added in commit `55a7bd0`: +- `idx_ai_generations_created` +- `idx_ai_conversations_updated` +- `idx_clipboard_content_type` +- `idx_clipboard_pinned` +- `idx_clipboard_last_copied` +- `idx_snippets_keyword` + +--- + +## 6. Recommended Action Plan + +### ✅ Completed Quick Wins + +| # | Task | Status | Source | +|---|------|--------|--------| +| 1 | Fix React Reconciler stubs (no-op, don't throw) | ✅ Done | Current branch | +| 2 | Implement `usePersistentState` properly | ✅ Done | Current branch | +| 3 | Add database indices | ✅ Done | Commit `55a7bd0` | +| 4 | Fix TcpListener crash on port conflict | ✅ Done | Current branch | +| 5 | N+1 query fix in file indexer | ✅ Done | Commit `55a7bd0` | +| 6 | Replace println!/eprintln! with tracing | ✅ Done | Commit `8ff7426` | +| 7 | CPU monitor background thread | ✅ Done | Commit `8ff7426` | +| 8 | Remove debug console.log statements | ✅ Done | Commit `55a7bd0` | + +### Remaining Quick Wins + +| # | Task | Effort | Impact | +|---|------|--------|--------| +| 1 | Add more AppleScript shims (open URL, do shell script) | 4 hours | Medium | +| 2 | Replace remaining `.unwrap()` with safe handling | 1 day | High | + +### Medium Term (1-2 weeks) + +| # | Task | Effort | Impact | +|---|------|--------|--------| +| 3 | Create Downloads Manager module | 2 days | Medium | +| 4 | Window management (X11) | 1 week | High | +| 5 | System commands (shutdown/lock/sleep) | 2 days | High | +| 6 | Per-command global hotkeys | 1 week | High | + +### Long Term (1+ months) + +- Wayland window management (compositor-specific) +- Full AppleScript parser/translator +- Extension compatibility scoring system +- Fork/adapt top 10 popular Raycast extensions for Linux + +--- + +## 7. Files Referenced + +| File | Purpose | +|------|---------| +| `sidecar/src/hostConfig.ts` | React Reconciler configuration | +| `sidecar/src/api/index.ts` | Raycast API exports | +| `sidecar/src/api/*.ts` | Individual API implementations | +| `src-tauri/src/extension_shims.rs` | macOS API compatibility | +| `src-tauri/src/browser_extension.rs` | WebSocket server | +| `src-tauri/src/file_search/indexer.rs` | File indexing | +| `src-tauri/src/clipboard.rs` | Clipboard operations | +| `TODO.md` | Existing task tracking | + +--- + +## 8. Conclusion + +Flare has made significant progress. **8 of the original quick wins are now complete.** + +### What's Working Well Now +- ✅ Extension React rendering (reconciler fixed) +- ✅ Extension state persistence (usePersistentState fixed) +- ✅ Database performance (indices + N+1 fix) +- ✅ Logging infrastructure (tracing) +- ✅ System monitoring (background CPU thread) +- ✅ Stability (TcpListener crash fixed) + +### Remaining Focus Areas +1. **Extension Compatibility**: Expand AppleScript shims, add missing APIs +2. **Feature Gaps**: Downloads Manager, Window Management, System Commands +3. **Code Quality**: ~30 remaining `.unwrap()` calls need safe handling + +### Estimated Timeline +- **Current State:** ~70% Raycast feature parity (up from 60%) +- **To 90% parity:** 6-8 weeks of focused development +- **Key blockers:** Window management (X11/Wayland complexity) + +--- + +*This review supplements the existing TODO.md with specific technical findings.* +*Updated after fixes on 2025-12-22.* diff --git a/docs/archive/README.md b/docs/archive/README.md new file mode 100644 index 00000000..93ce2e65 --- /dev/null +++ b/docs/archive/README.md @@ -0,0 +1,61 @@ +# Archived Planning Documents + +This directory contains historical audit reports. All active planning has moved to **[ROADMAP.md](/ROADMAP.md)**. + +## 📋 Active Documents + +- **[ROADMAP.md](/ROADMAP.md)** ← **USE THIS!** - Comprehensive roadmap (updated Dec 24, 2025) + - Current status: v0.1.1, 75% Raycast parity + - Next priorities: Per-command hotkeys, system commands + - Milestones and feature tracking + +## 📚 Archive Contents + +### Audit Reports (Reference Only) + +- **[AUDIT_REPORT.md](./AUDIT_REPORT.md)** - Comprehensive code audit (Dec 21, 2025) + - 📖 Status: Reference document + - 🔍 Contains: Performance analysis, code quality review, security concerns + - 💡 Note: Many issues addressed in v0.1.0 and v0.1.1 + +- **[CLAUDE_REVIEW_2025-12-22.md](./CLAUDE_REVIEW_2025-12-22.md)** - Code review (Dec 22, 2025) + - ✅ Status: Issues fixed in v0.1.0 and v0.1.1 + - 🐛 Addressed: Extension compatibility, database performance, graceful error handling + +## 🔄 Completions Since Audit (v0.1.0 - v0.1.1) + +Items from the audit reports that have been completed: + +### Performance & Stability +- ✅ Database indices added (clipboard, AI, snippets) +- ✅ N+1 query fix in file indexer +- ✅ CPU monitor background thread (non-blocking) +- ✅ Structured logging with tracing crate + +### Code Quality +- ✅ Debug console.log statements removed +- ✅ println!/eprintln! replaced with proper logging +- ✅ Extension compatibility fixes (React Reconciler, usePersistentState) +- ✅ TcpListener graceful port conflict handling + +### Features Added +- ✅ Comprehensive settings system (6 tabs) +- ✅ Theme support (9 professional themes) +- ✅ Close on blur functionality +- ✅ Auto-start on login +- ✅ Frecency bug fix (timestamp conversion) +- ✅ Window edge visibility improvements +- ✅ Version management automation + +## 📖 Using These Documents + +**For current work:** Always refer to [ROADMAP.md](/ROADMAP.md) + +**For historical context:** +- Understanding technical debt inventory → AUDIT_REPORT.md +- Reviewing past code quality issues → CLAUDE_REVIEW_2025-12-22.md + +--- + +**Last Updated:** Dec 24, 2025 +**Maintained By:** Reference only - see ROADMAP.md for active planning diff --git a/justfile b/justfile new file mode 100644 index 00000000..661b103f --- /dev/null +++ b/justfile @@ -0,0 +1,434 @@ +# Flare Build System +# Run `just --list` to see available recipes + +# ============================================================================ +# Variables +# ============================================================================ + +# Read version from package.json +version := `jq -r .version package.json` + +# Get the Rust target triple for sidecar naming +arch := `rustc -vV | awk '/host:/ {print $2}'` + +# Install directories +local_bin := env_var('HOME') / ".local/bin" +appimage_dir := "src-tauri/target/release/bundle/appimage" +deb_dir := "src-tauri/target/release/bundle/deb" + +# Swift library paths for runtime +swift_lib_path := justfile_directory() / "src-tauri/SoulverWrapper/.build/release:" + justfile_directory() / "src-tauri/SoulverWrapper/Vendor/SoulverCore-linux" + +# ============================================================================ +# Dependency Checks +# ============================================================================ + +# Check all required build dependencies are installed +[group('setup')] +check-deps: + #!/usr/bin/env bash + set -e + echo "🔍 Checking dependencies..." + + missing=() + + command -v pnpm &>/dev/null || missing+=("pnpm") + command -v swift &>/dev/null || missing+=("swift") + command -v cargo &>/dev/null || missing+=("cargo (rustup)") + command -v jq &>/dev/null || missing+=("jq") + + if [ ${#missing[@]} -ne 0 ]; then + echo "❌ Missing dependencies:" + for dep in "${missing[@]}"; do + echo " - $dep" + done + exit 1 + fi + + echo "✅ All dependencies found" + echo " pnpm: $(pnpm --version)" + echo " swift: $(swift --version 2>&1 | head -1)" + echo " cargo: $(cargo --version)" + +# Check AppImage tools are installed (for full build) +[group('setup')] +check-appimage-tools: + #!/usr/bin/env bash + set -e + + if ! command -v linuxdeploy &>/dev/null; then + echo "❌ linuxdeploy not found" + echo "Run: just setup-tools" + exit 1 + fi + + if ! command -v appimagetool &>/dev/null; then + echo "❌ appimagetool not found" + echo "Run: just setup-tools" + exit 1 + fi + + echo "✅ AppImage tools found" + +# ============================================================================ +# Setup +# ============================================================================ + +# Download and install AppImage build tools to ~/.local/bin +[group('setup')] +setup-tools: + #!/usr/bin/env bash + set -e + + mkdir -p "{{local_bin}}" + + echo "📦 Installing AppImage build tools..." + + if [ ! -f "{{local_bin}}/linuxdeploy-x86_64.AppImage" ]; then + echo "⬇️ Downloading linuxdeploy..." + curl -L -o "{{local_bin}}/linuxdeploy-x86_64.AppImage" \ + "https://github.com/linuxdeploy/linuxdeploy/releases/download/continuous/linuxdeploy-x86_64.AppImage" + chmod +x "{{local_bin}}/linuxdeploy-x86_64.AppImage" + ln -sf "{{local_bin}}/linuxdeploy-x86_64.AppImage" "{{local_bin}}/linuxdeploy" + echo "✅ linuxdeploy installed" + else + echo "✅ linuxdeploy already installed" + fi + + if [ ! -f "{{local_bin}}/appimagetool-x86_64.AppImage" ]; then + echo "⬇️ Downloading appimagetool..." + curl -L -o "{{local_bin}}/appimagetool-x86_64.AppImage" \ + "https://github.com/AppImage/AppImageKit/releases/download/continuous/appimagetool-x86_64.AppImage" + chmod +x "{{local_bin}}/appimagetool-x86_64.AppImage" + ln -sf "{{local_bin}}/appimagetool-x86_64.AppImage" "{{local_bin}}/appimagetool" + echo "✅ appimagetool installed" + else + echo "✅ appimagetool already installed" + fi + + echo "" + echo "✅ All AppImage tools installed!" + echo "Make sure {{local_bin}} is in your PATH" + +# Install AppImage tools system-wide (requires sudo) +[group('setup')] +[confirm("This will create symlinks in /usr/local/bin. Continue?")] +install-tools-system: + #!/usr/bin/env bash + set -e + + echo "📦 Installing AppImage tools system-wide..." + + if [ -L "/usr/local/bin/linuxdeploy" ] && [ -L "/usr/local/bin/appimagetool" ]; then + echo "✅ Tools already installed system-wide" + exit 0 + fi + + if [ -f "{{local_bin}}/linuxdeploy-x86_64.AppImage" ]; then + sudo ln -sf "{{local_bin}}/linuxdeploy-x86_64.AppImage" /usr/local/bin/linuxdeploy + echo "✅ linuxdeploy symlinked" + else + echo "❌ linuxdeploy not found. Run: just setup-tools" + exit 1 + fi + + if [ -f "{{local_bin}}/appimagetool-x86_64.AppImage" ]; then + sudo ln -sf "{{local_bin}}/appimagetool-x86_64.AppImage" /usr/local/bin/appimagetool + echo "✅ appimagetool symlinked" + else + echo "❌ appimagetool not found. Run: just setup-tools" + exit 1 + fi + + echo "✅ All tools installed to /usr/local/bin" + +# ============================================================================ +# Build Components +# ============================================================================ + +# Build the sidecar (Node.js extension runtime) +[group('build')] +sidecar: check-deps + #!/usr/bin/env bash + set -e + echo "📦 Building sidecar..." + pnpm --filter sidecar build + echo "✅ Sidecar built" + +# Build the SoulverCore Swift wrapper +[group('build')] +swift: check-deps + #!/usr/bin/env bash + set -e + echo "🐦 Building SoulverCore wrapper..." + swift build -c release --package-path src-tauri/SoulverWrapper + echo "✅ Swift wrapper built" + +# Build the Svelte frontend +[group('build')] +frontend: check-deps + #!/usr/bin/env bash + set -e + echo "🎨 Building frontend..." + pnpm build + echo "✅ Frontend built" + +# ============================================================================ +# Full Builds +# ============================================================================ + +# Build everything (AppImage) +[group('build')] +build: check-deps check-appimage-tools sidecar swift + #!/usr/bin/env bash + set -e + + export PATH="{{local_bin}}:$PATH" + export LD_LIBRARY_PATH="/opt/swift/usr/lib/swift/linux:${LD_LIBRARY_PATH:-}" + + echo "🚀 Building Tauri app..." + pnpm tauri build + + echo "" + echo "✅ Build complete!" + echo "AppImage location: {{appimage_dir}}/" + ls -lh {{appimage_dir}}/*.AppImage 2>/dev/null || echo "No AppImage found" + +# Build DEB package only (no AppImage tools required) +[group('build')] +build-deb: check-deps sidecar swift + #!/usr/bin/env bash + set -e + + echo "🚀 Building Tauri app (DEB only)..." + pnpm tauri build --bundles deb + + echo "" + echo "✅ Build complete!" + echo "DEB package: {{deb_dir}}/flare_{{version}}_amd64.deb" + echo "" + echo "To install:" + echo " sudo dpkg -i {{deb_dir}}/flare_{{version}}_amd64.deb" + +# Build RPM package only +[group('build')] +build-rpm: check-deps sidecar swift + #!/usr/bin/env bash + set -e + + echo "🚀 Building Tauri app (RPM only)..." + pnpm tauri build --bundles rpm + + echo "" + echo "✅ Build complete!" + ls -lh src-tauri/target/release/bundle/rpm/*.rpm 2>/dev/null || echo "No RPM found" + +# Build AppImage only (requires AppImage tools) +[group('build')] +build-appimage: check-deps check-appimage-tools sidecar swift + #!/usr/bin/env bash + set -e + + export PATH="{{local_bin}}:$PATH" + export LD_LIBRARY_PATH="/opt/swift/usr/lib/swift/linux:${LD_LIBRARY_PATH:-}" + + echo "🚀 Building Tauri app (AppImage only)..." + pnpm tauri build --bundles appimage + + echo "" + echo "✅ Build complete!" + echo "AppImage location: {{appimage_dir}}/" + ls -lh {{appimage_dir}}/*.AppImage 2>/dev/null || echo "No AppImage found" + +# Build AppImage with faster profile (no LTO, parallel codegen) +[group('build')] +build-appimage-fast: check-deps check-appimage-tools sidecar swift + #!/usr/bin/env bash + set -e + + export PATH="{{local_bin}}:$PATH" + export LD_LIBRARY_PATH="/opt/swift/usr/lib/swift/linux:${LD_LIBRARY_PATH:-}" + + echo "🚀 Building Tauri app (AppImage, fast profile)..." + pnpm tauri build --bundles appimage -- --profile release-fast + + echo "" + echo "✅ Build complete!" + echo "AppImage location: {{appimage_dir}}/" + ls -lh {{appimage_dir}}/*.AppImage 2>/dev/null || echo "No AppImage found" + +# Build DEB with faster profile (no LTO, parallel codegen) +[group('build')] +build-deb-fast: check-deps sidecar swift + #!/usr/bin/env bash + set -e + + echo "🚀 Building Tauri app (DEB, fast profile)..." + pnpm tauri build --bundles deb -- --profile release-fast + + echo "" + echo "✅ Build complete!" + echo "DEB package: {{deb_dir}}/flare_{{version}}_amd64.deb" + +# ============================================================================ +# Install & Run +# ============================================================================ + +# Install built AppImage to ~/.local/bin +[group('run')] +install: build + #!/usr/bin/env bash + set -e + + echo "📥 Installing Flare..." + + APPIMAGE=$(find {{appimage_dir}} -name "*.AppImage" -type f 2>/dev/null | head -1) + + if [ -z "$APPIMAGE" ]; then + echo "❌ No AppImage found. Build may have failed." + exit 1 + fi + + mkdir -p "{{local_bin}}" + + # Kill any running instances + echo "🛑 Stopping any running instances..." + pkill -f "flare.AppImage" || true + sleep 1 + + # Copy and make executable + echo "📋 Copying to {{local_bin}}/flare.AppImage..." + cp "$APPIMAGE" "{{local_bin}}/flare.AppImage" + chmod +x "{{local_bin}}/flare.AppImage" + + echo "" + echo "✅ Installation complete!" + echo "Installed to: {{local_bin}}/flare.AppImage" + +# Run the installed AppImage +[group('run')] +run: + #!/usr/bin/env bash + set -e + + if [ ! -f "{{local_bin}}/flare.AppImage" ]; then + echo "❌ Flare not installed at {{local_bin}}/flare.AppImage" + echo "Run: just install" + exit 1 + fi + + echo "🚀 Starting Flare..." + exec "{{local_bin}}/flare.AppImage" + +# Build, install, and run (full pipeline) +[group('run')] +build-and-run: install run + +# ============================================================================ +# Development +# ============================================================================ + +# Run in development mode with hot-reload +[group('dev')] +dev: check-deps + #!/usr/bin/env bash + set -e + export LD_LIBRARY_PATH="{{swift_lib_path}}" + exec pnpm tauri dev + +# Run frontend only (no Tauri) +[group('dev')] +dev-frontend: + pnpm dev + +# Type check the codebase +[group('dev')] +check: + pnpm check + +# Run linting +[group('dev')] +lint: + pnpm lint + +# Run tests +[group('dev')] +test: + pnpm test + +# ============================================================================ +# Utilities +# ============================================================================ + +# Clean all build artifacts +[group('util')] +[confirm("This will delete all build artifacts. Continue?")] +clean: + #!/usr/bin/env bash + set -e + echo "🧹 Cleaning build artifacts..." + + rm -rf src-tauri/target + rm -rf build + rm -rf .svelte-kit + rm -rf sidecar/dist + rm -rf src-tauri/binaries/app-* + rm -rf src-tauri/SoulverWrapper/.build + + echo "✅ Clean complete" + +# Show build configuration +[group('util')] +info: + @echo "Flare Build Info" + @echo "================" + @echo "Version: {{version}}" + @echo "Target: {{arch}}" + @echo "Local bin: {{local_bin}}" + +# List all recipes +[group('util')] +@help: + just --list --unsorted + +# Enable Flare to start on login +[group('util')] +autostart: + #!/usr/bin/env bash + set -e + + LOCAL_BIN="${HOME}/.local/bin" + AUTOSTART_DIR="${HOME}/.config/autostart" + DESKTOP_FILE="${AUTOSTART_DIR}/flare.desktop" + + mkdir -p "$AUTOSTART_DIR" + + echo "[Desktop Entry]" > "$DESKTOP_FILE" + echo "Type=Application" >> "$DESKTOP_FILE" + echo "Name=Flare" >> "$DESKTOP_FILE" + echo "Comment=Spotlight-like launcher for Linux" >> "$DESKTOP_FILE" + echo "Exec=${LOCAL_BIN}/flare.AppImage" >> "$DESKTOP_FILE" + echo "Icon=flare" >> "$DESKTOP_FILE" + echo "Terminal=false" >> "$DESKTOP_FILE" + echo "Categories=Utility;" >> "$DESKTOP_FILE" + echo "X-GNOME-Autostart-enabled=true" >> "$DESKTOP_FILE" + echo "StartupNotify=false" >> "$DESKTOP_FILE" + + echo "✅ Autostart enabled" + echo "Flare will start automatically on login" + echo "Desktop file: $DESKTOP_FILE" + +# Disable Flare autostart +[group('util')] +remove-autostart: + #!/usr/bin/env bash + set -e + + DESKTOP_FILE="${HOME}/.config/autostart/flare.desktop" + + if [ -f "$DESKTOP_FILE" ]; then + rm "$DESKTOP_FILE" + echo "✅ Autostart disabled" + else + echo "ℹ️ Autostart was not enabled" + fi diff --git a/logs/.ff15824ab550a7e56d972048b294f9f5951b46b4-audit.json b/logs/.ff15824ab550a7e56d972048b294f9f5951b46b4-audit.json new file mode 100644 index 00000000..8856a764 --- /dev/null +++ b/logs/.ff15824ab550a7e56d972048b294f9f5951b46b4-audit.json @@ -0,0 +1,20 @@ +{ + "keep": { + "days": true, + "amount": 14 + }, + "auditLog": "/home/steven/scratch/flareup/logs/.ff15824ab550a7e56d972048b294f9f5951b46b4-audit.json", + "files": [ + { + "date": 1765149019261, + "name": "/home/steven/scratch/flareup/logs/mcp-puppeteer-2025-12-07.log", + "hash": "5f604a2a05372b44534fd8dbd90eecdfaa0125e9789c1780cddcf4ecdb2580a1" + }, + { + "date": 1765198263300, + "name": "/home/steven/scratch/flareup/logs/mcp-puppeteer-2025-12-08.log", + "hash": "9a778a43ff4fbe6968ee775409af4581b65ae2fe6e6eacc6283c5f28ca68d501" + } + ], + "hashType": "sha256" +} \ No newline at end of file diff --git a/logs/mcp-puppeteer-2025-12-07.log b/logs/mcp-puppeteer-2025-12-07.log new file mode 100644 index 00000000..d913cd3c --- /dev/null +++ b/logs/mcp-puppeteer-2025-12-07.log @@ -0,0 +1,2 @@ +{"level":"info","message":"Starting MCP server","service":"mcp-puppeteer","timestamp":"2025-12-07 18:10:19.322"} +{"level":"info","message":"MCP server started successfully","service":"mcp-puppeteer","timestamp":"2025-12-07 18:10:19.323"} diff --git a/logs/mcp-puppeteer-2025-12-08.log b/logs/mcp-puppeteer-2025-12-08.log new file mode 100644 index 00000000..ce685e79 --- /dev/null +++ b/logs/mcp-puppeteer-2025-12-08.log @@ -0,0 +1,4 @@ +{"level":"info","message":"Starting MCP server","service":"mcp-puppeteer","timestamp":"2025-12-08 07:51:03.334"} +{"level":"info","message":"MCP server started successfully","service":"mcp-puppeteer","timestamp":"2025-12-08 07:51:03.335"} +{"level":"info","message":"Starting MCP server","service":"mcp-puppeteer","timestamp":"2025-12-08 07:51:06.435"} +{"level":"info","message":"MCP server started successfully","service":"mcp-puppeteer","timestamp":"2025-12-08 07:51:06.436"} diff --git a/package.json b/package.json index 111d11e9..773a1789 100644 --- a/package.json +++ b/package.json @@ -1,11 +1,14 @@ { "name": "flare", - "version": "0.1.0", + "version": "0.1.1", "description": "", "type": "module", "scripts": { "dev": "vite dev", + "dev:tauri": "just dev", "build": "vite build", + "build:full": "just build", + "build:deb": "just build-deb", "preview": "vite preview", "check": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json", "check:watch": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json --watch", @@ -17,16 +20,16 @@ }, "license": "MIT", "dependencies": { - "@fontsource-variable/inter": "^5.2.6", "@flare/protocol": "workspace:*", - "@tauri-apps/api": "^2", + "@fontsource-variable/inter": "^5.2.6", + "@tauri-apps/api": "~2.6.0", "@tauri-apps/plugin-clipboard-manager": "~2.2.2", "@tauri-apps/plugin-deep-link": "~2", "@tauri-apps/plugin-dialog": "~2.3.0", "@tauri-apps/plugin-fs": "~2.4.0", "@tauri-apps/plugin-global-shortcut": "~2.2.1", "@tauri-apps/plugin-http": "~2.4.4", - "@tauri-apps/plugin-opener": "~2", + "@tauri-apps/plugin-opener": "~2.3.0", "@tauri-apps/plugin-os": "~2.3.0", "@tauri-apps/plugin-shell": "~2.2.1", "embla-carousel-svelte": "^8.6.0", @@ -48,7 +51,7 @@ "@sveltejs/vite-plugin-svelte": "^5.0.0", "@tailwindcss/typography": "^0.5.16", "@tailwindcss/vite": "^4.0.0", - "@tauri-apps/cli": "^2.7.0", + "@tauri-apps/cli": "^2.9.6", "@testing-library/jest-dom": "^6.6.3", "@testing-library/svelte": "^5.2.8", "@testing-library/user-event": "^14.6.1", @@ -86,4 +89,4 @@ "esbuild" ] } -} +} \ No newline at end of file diff --git a/packages/protocol/src/main.ts b/packages/protocol/src/main.ts index 73b66f78..31b78a2e 100644 --- a/packages/protocol/src/main.ts +++ b/packages/protocol/src/main.ts @@ -43,6 +43,11 @@ const ResetElementMessageSchema = z.object({ export const SidecarMessageSchema = z.union([BatchUpdateSchema, CommandSchema, LogMessageSchema]); export type SidecarMessage = z.infer; +const CloseMainWindowMessageSchema = z.object({ + type: z.literal('close-main-window'), + payload: z.object({}) +}); + export const SidecarMessageWithPluginsSchema = z .union([ BatchUpdateSchema, @@ -65,7 +70,8 @@ export const SidecarMessageWithPluginsSchema = z AiStreamErrorMessageSchema, AiCanAccessMessageSchema, FocusElementMessageSchema, - ResetElementMessageSchema + ResetElementMessageSchema, + CloseMainWindowMessageSchema ]) .and(z.object({ timestamp: z.number() })); export type SidecarMessageWithPlugins = z.infer; diff --git a/packages/protocol/src/plugin.ts b/packages/protocol/src/plugin.ts index 76f5c925..1cf8fcf5 100644 --- a/packages/protocol/src/plugin.ts +++ b/packages/protocol/src/plugin.ts @@ -1,6 +1,13 @@ import { z } from 'zod/v4'; import { PreferenceSchema } from './preferences'; +export const CompatibilityWarningSchema = z.object({ + commandName: z.string(), + commandTitle: z.string(), + reason: z.string() +}); +export type CompatibilityWarning = z.infer; + export const PluginInfoSchema = z.object({ title: z.string(), description: z.string().optional(), @@ -13,7 +20,9 @@ export const PluginInfoSchema = z.object({ commandPreferences: z.array(PreferenceSchema).optional(), mode: z.enum(['view', 'no-view', 'menu-bar']).optional(), author: z.union([z.string(), z.object({ name: z.string() })]).optional(), - owner: z.string().optional() + owner: z.string().optional(), + compatibilityWarnings: z.array(CompatibilityWarningSchema).optional(), + compatibilityScore: z.number().int().min(0).max(100).optional() }); export type PluginInfo = z.infer; diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 6f6f2a56..2b471843 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -8,15 +8,15 @@ importers: .: dependencies: - '@fontsource-variable/inter': - specifier: ^5.2.6 - version: 5.2.6 '@flare/protocol': specifier: workspace:* version: link:packages/protocol + '@fontsource-variable/inter': + specifier: ^5.2.6 + version: 5.2.6 '@tauri-apps/api': - specifier: ^2 - version: 2.5.0 + specifier: ~2.6.0 + version: 2.6.0 '@tauri-apps/plugin-clipboard-manager': specifier: ~2.2.2 version: 2.2.2 @@ -36,8 +36,8 @@ importers: specifier: ~2.4.4 version: 2.4.4 '@tauri-apps/plugin-opener': - specifier: ~2 - version: 2.2.7 + specifier: ~2.3.0 + version: 2.3.1 '@tauri-apps/plugin-os': specifier: ~2.3.0 version: 2.3.0 @@ -97,8 +97,8 @@ importers: specifier: ^4.0.0 version: 4.1.8(vite@6.3.5(@types/node@24.0.0)(jiti@2.4.2)(lightningcss@1.30.1)) '@tauri-apps/cli': - specifier: ^2.7.0 - version: 2.7.0 + specifier: ^2.9.6 + version: 2.9.6 '@testing-library/jest-dom': specifier: ^6.6.3 version: 6.6.3 @@ -1020,80 +1020,77 @@ packages: peerDependencies: vite: ^5.2.0 || ^6 - '@tauri-apps/api@2.5.0': - resolution: {integrity: sha512-Ldux4ip+HGAcPUmuLT8EIkk6yafl5vK0P0c0byzAKzxJh7vxelVtdPONjfgTm96PbN24yjZNESY8CKo8qniluA==} - '@tauri-apps/api@2.6.0': resolution: {integrity: sha512-hRNcdercfgpzgFrMXWwNDBN0B7vNzOzRepy6ZAmhxi5mDLVPNrTpo9MGg2tN/F7JRugj4d2aF7E1rtPXAHaetg==} - '@tauri-apps/cli-darwin-arm64@2.7.0': - resolution: {integrity: sha512-4sSrBlZuGb78UKkVQHdexzrYCamsiFQXFFuh9EI8vdq9PgTG8oXByQNIMx+p01HB594kLhaySrgozst6EFPoVQ==} + '@tauri-apps/cli-darwin-arm64@2.9.6': + resolution: {integrity: sha512-gf5no6N9FCk1qMrti4lfwP77JHP5haASZgVbBgpZG7BUepB3fhiLCXGUK8LvuOjP36HivXewjg72LTnPDScnQQ==} engines: {node: '>= 10'} cpu: [arm64] os: [darwin] - '@tauri-apps/cli-darwin-x64@2.7.0': - resolution: {integrity: sha512-Uec0pKyw5/w4UmcRLyPt/+JG4dsIKj0TeKtF3PDz4EAGOevjSBaLIgu8aC62s3wKLCtDydTdIMMQ1ENHTJgfPA==} + '@tauri-apps/cli-darwin-x64@2.9.6': + resolution: {integrity: sha512-oWh74WmqbERwwrwcueJyY6HYhgCksUc6NT7WKeXyrlY/FPmNgdyQAgcLuTSkhRFuQ6zh4Np1HZpOqCTpeZBDcw==} engines: {node: '>= 10'} cpu: [x64] os: [darwin] - '@tauri-apps/cli-linux-arm-gnueabihf@2.7.0': - resolution: {integrity: sha512-ZE7y/3MW9i1DccnHyktBDbryMWVFdnIfdqQoJ40iBzQGoBIQk4DonA5zROcqw0qGRTsXfTRnVjIXnh5eLkoCzQ==} + '@tauri-apps/cli-linux-arm-gnueabihf@2.9.6': + resolution: {integrity: sha512-/zde3bFroFsNXOHN204DC2qUxAcAanUjVXXSdEGmhwMUZeAQalNj5cz2Qli2elsRjKN/hVbZOJj0gQ5zaYUjSg==} engines: {node: '>= 10'} cpu: [arm] os: [linux] - '@tauri-apps/cli-linux-arm64-gnu@2.7.0': - resolution: {integrity: sha512-cWGl8OL+FjPWib+K8YK1S6o2Z34+f2LxnFRziTPdHwrdlVNO2xYkJmrT6X3PyHtccf/IByicxVvjkExLlpZ4+A==} + '@tauri-apps/cli-linux-arm64-gnu@2.9.6': + resolution: {integrity: sha512-pvbljdhp9VOo4RnID5ywSxgBs7qiylTPlK56cTk7InR3kYSTJKYMqv/4Q/4rGo/mG8cVppesKIeBMH42fw6wjg==} engines: {node: '>= 10'} cpu: [arm64] os: [linux] - '@tauri-apps/cli-linux-arm64-musl@2.7.0': - resolution: {integrity: sha512-MftwAsMDw9dmKosbHdSHyCa0jgKjt9UslJsRpXym8dmbw+gzD3YrY41GORq0HbCYImC5rS/qQf9eELoLcqY/BQ==} + '@tauri-apps/cli-linux-arm64-musl@2.9.6': + resolution: {integrity: sha512-02TKUndpodXBCR0oP//6dZWGYcc22Upf2eP27NvC6z0DIqvkBBFziQUcvi2n6SrwTRL0yGgQjkm9K5NIn8s6jw==} engines: {node: '>= 10'} cpu: [arm64] os: [linux] - '@tauri-apps/cli-linux-riscv64-gnu@2.7.0': - resolution: {integrity: sha512-QwL4YhckgtozR7wlXOTRKijkgViz4B0OeXqhsIrhQock2HDxqCh5VqhGQ9LRJ6HsXY1JkAYZQUAFvcBj2DHUXQ==} + '@tauri-apps/cli-linux-riscv64-gnu@2.9.6': + resolution: {integrity: sha512-fmp1hnulbqzl1GkXl4aTX9fV+ubHw2LqlLH1PE3BxZ11EQk+l/TmiEongjnxF0ie4kV8DQfDNJ1KGiIdWe1GvQ==} engines: {node: '>= 10'} cpu: [riscv64] os: [linux] - '@tauri-apps/cli-linux-x64-gnu@2.7.0': - resolution: {integrity: sha512-4MwN2sqUEYGKjwcs0afPp79DkiydAfkGNdBTPfvNrDUmhbT6JE95RhYyzfZLfXNinsdB4nbZmYGEPIv5NpK2KQ==} + '@tauri-apps/cli-linux-x64-gnu@2.9.6': + resolution: {integrity: sha512-vY0le8ad2KaV1PJr+jCd8fUF9VOjwwQP/uBuTJvhvKTloEwxYA/kAjKK9OpIslGA9m/zcnSo74czI6bBrm2sYA==} engines: {node: '>= 10'} cpu: [x64] os: [linux] - '@tauri-apps/cli-linux-x64-musl@2.7.0': - resolution: {integrity: sha512-N1hygCRW0X4msCpdG3/UXb+Z8wpc4lYRqlkhbbxLOjzHwjuLS+86GYAvRLe7JIePVXVUI5BfX1HeurdbWWINTg==} + '@tauri-apps/cli-linux-x64-musl@2.9.6': + resolution: {integrity: sha512-TOEuB8YCFZTWVDzsO2yW0+zGcoMiPPwcUgdnW1ODnmgfwccpnihDRoks+ABT1e3fHb1ol8QQWsHSCovb3o2ENQ==} engines: {node: '>= 10'} cpu: [x64] os: [linux] - '@tauri-apps/cli-win32-arm64-msvc@2.7.0': - resolution: {integrity: sha512-sqVDkwvUsuHeUVlMXpkXPo4gWFXuOcNbme0xPQ1r07hqBxYJlqwDY9XS0sC/7XljnO6anJaSr8FJkuixnIqyUQ==} + '@tauri-apps/cli-win32-arm64-msvc@2.9.6': + resolution: {integrity: sha512-ujmDGMRc4qRLAnj8nNG26Rlz9klJ0I0jmZs2BPpmNNf0gM/rcVHhqbEkAaHPTBVIrtUdf7bGvQAD2pyIiUrBHQ==} engines: {node: '>= 10'} cpu: [arm64] os: [win32] - '@tauri-apps/cli-win32-ia32-msvc@2.7.0': - resolution: {integrity: sha512-2sZFAZZC8fwc65d5ACrTUgKPYbZgd3DAN5gI4WzCyTQgPqLq/7CWD25Wt7+scTV7FhT1zjL88hI10yPsEE0kkA==} + '@tauri-apps/cli-win32-ia32-msvc@2.9.6': + resolution: {integrity: sha512-S4pT0yAJgFX8QRCyKA1iKjZ9Q/oPjCZf66A/VlG5Yw54Nnr88J1uBpmenINbXxzyhduWrIXBaUbEY1K80ZbpMg==} engines: {node: '>= 10'} cpu: [ia32] os: [win32] - '@tauri-apps/cli-win32-x64-msvc@2.7.0': - resolution: {integrity: sha512-mGqRRpqdZ5iKLaQwP1paz89koLVrE/mUOrq6boftHDxF8K1sU/9Um/E7SLrkfeICiWbVG5yY7j2N/j8/D80blw==} + '@tauri-apps/cli-win32-x64-msvc@2.9.6': + resolution: {integrity: sha512-ldWuWSSkWbKOPjQMJoYVj9wLHcOniv7diyI5UAJ4XsBdtaFB0pKHQsqw/ItUma0VXGC7vB4E9fZjivmxur60aw==} engines: {node: '>= 10'} cpu: [x64] os: [win32] - '@tauri-apps/cli@2.7.0': - resolution: {integrity: sha512-ozyxKm5YvivvLyrgHKyl6L+6y1/TkkeoA0cppPqxDv+ldbbtYiXx7dH8/G20tINh7dE+omSimN36i9M1ClGxtQ==} + '@tauri-apps/cli@2.9.6': + resolution: {integrity: sha512-3xDdXL5omQ3sPfBfdC8fCtDKcnyV7OqyzQgfyT5P3+zY6lcPqIYKQBvUasNvppi21RSdfhy44ttvJmftb0PCDw==} engines: {node: '>= 10'} hasBin: true @@ -1115,8 +1112,8 @@ packages: '@tauri-apps/plugin-http@2.4.4': resolution: {integrity: sha512-IjlaaS4z8ybB10qnh6djZ2FIaHdWVs/U1D1C56udKcjX1f+jAmR8/s7qBV5cv/OmcGkOIzet3LWuH70LKf4J2A==} - '@tauri-apps/plugin-opener@2.2.7': - resolution: {integrity: sha512-uduEyvOdjpPOEeDRrhwlCspG/f9EQalHumWBtLBnp3fRp++fKGLqDOyUhSIn7PzX45b/rKep//ZQSAQoIxobLA==} + '@tauri-apps/plugin-opener@2.3.1': + resolution: {integrity: sha512-ia4EW2x/ux2qYpuBRoo1qHwK9/tPwnXYX8HizrJlyHyd3iJCueI5lC7Oa3y1zaCKLZH5elEFSWunnfar8ejbdg==} '@tauri-apps/plugin-os@2.3.0': resolution: {integrity: sha512-dm3bDsMuUngpIQdJ1jaMkMfyQpHyDcaTIKTFaAMHoKeUd+Is3UHO2uzhElr6ZZkfytIIyQtSVnCWdW2Kc58f3g==} @@ -3986,64 +3983,62 @@ snapshots: tailwindcss: 4.1.8 vite: 6.3.5(@types/node@24.0.0)(jiti@2.4.2)(lightningcss@1.30.1) - '@tauri-apps/api@2.5.0': {} - '@tauri-apps/api@2.6.0': {} - '@tauri-apps/cli-darwin-arm64@2.7.0': + '@tauri-apps/cli-darwin-arm64@2.9.6': optional: true - '@tauri-apps/cli-darwin-x64@2.7.0': + '@tauri-apps/cli-darwin-x64@2.9.6': optional: true - '@tauri-apps/cli-linux-arm-gnueabihf@2.7.0': + '@tauri-apps/cli-linux-arm-gnueabihf@2.9.6': optional: true - '@tauri-apps/cli-linux-arm64-gnu@2.7.0': + '@tauri-apps/cli-linux-arm64-gnu@2.9.6': optional: true - '@tauri-apps/cli-linux-arm64-musl@2.7.0': + '@tauri-apps/cli-linux-arm64-musl@2.9.6': optional: true - '@tauri-apps/cli-linux-riscv64-gnu@2.7.0': + '@tauri-apps/cli-linux-riscv64-gnu@2.9.6': optional: true - '@tauri-apps/cli-linux-x64-gnu@2.7.0': + '@tauri-apps/cli-linux-x64-gnu@2.9.6': optional: true - '@tauri-apps/cli-linux-x64-musl@2.7.0': + '@tauri-apps/cli-linux-x64-musl@2.9.6': optional: true - '@tauri-apps/cli-win32-arm64-msvc@2.7.0': + '@tauri-apps/cli-win32-arm64-msvc@2.9.6': optional: true - '@tauri-apps/cli-win32-ia32-msvc@2.7.0': + '@tauri-apps/cli-win32-ia32-msvc@2.9.6': optional: true - '@tauri-apps/cli-win32-x64-msvc@2.7.0': + '@tauri-apps/cli-win32-x64-msvc@2.9.6': optional: true - '@tauri-apps/cli@2.7.0': + '@tauri-apps/cli@2.9.6': optionalDependencies: - '@tauri-apps/cli-darwin-arm64': 2.7.0 - '@tauri-apps/cli-darwin-x64': 2.7.0 - '@tauri-apps/cli-linux-arm-gnueabihf': 2.7.0 - '@tauri-apps/cli-linux-arm64-gnu': 2.7.0 - '@tauri-apps/cli-linux-arm64-musl': 2.7.0 - '@tauri-apps/cli-linux-riscv64-gnu': 2.7.0 - '@tauri-apps/cli-linux-x64-gnu': 2.7.0 - '@tauri-apps/cli-linux-x64-musl': 2.7.0 - '@tauri-apps/cli-win32-arm64-msvc': 2.7.0 - '@tauri-apps/cli-win32-ia32-msvc': 2.7.0 - '@tauri-apps/cli-win32-x64-msvc': 2.7.0 + '@tauri-apps/cli-darwin-arm64': 2.9.6 + '@tauri-apps/cli-darwin-x64': 2.9.6 + '@tauri-apps/cli-linux-arm-gnueabihf': 2.9.6 + '@tauri-apps/cli-linux-arm64-gnu': 2.9.6 + '@tauri-apps/cli-linux-arm64-musl': 2.9.6 + '@tauri-apps/cli-linux-riscv64-gnu': 2.9.6 + '@tauri-apps/cli-linux-x64-gnu': 2.9.6 + '@tauri-apps/cli-linux-x64-musl': 2.9.6 + '@tauri-apps/cli-win32-arm64-msvc': 2.9.6 + '@tauri-apps/cli-win32-ia32-msvc': 2.9.6 + '@tauri-apps/cli-win32-x64-msvc': 2.9.6 '@tauri-apps/plugin-clipboard-manager@2.2.2': dependencies: - '@tauri-apps/api': 2.5.0 + '@tauri-apps/api': 2.6.0 '@tauri-apps/plugin-deep-link@2.3.0': dependencies: - '@tauri-apps/api': 2.5.0 + '@tauri-apps/api': 2.6.0 '@tauri-apps/plugin-dialog@2.3.0': dependencies: @@ -4055,15 +4050,15 @@ snapshots: '@tauri-apps/plugin-global-shortcut@2.2.1': dependencies: - '@tauri-apps/api': 2.5.0 + '@tauri-apps/api': 2.6.0 '@tauri-apps/plugin-http@2.4.4': dependencies: - '@tauri-apps/api': 2.5.0 + '@tauri-apps/api': 2.6.0 - '@tauri-apps/plugin-opener@2.2.7': + '@tauri-apps/plugin-opener@2.3.1': dependencies: - '@tauri-apps/api': 2.5.0 + '@tauri-apps/api': 2.6.0 '@tauri-apps/plugin-os@2.3.0': dependencies: @@ -4071,7 +4066,7 @@ snapshots: '@tauri-apps/plugin-shell@2.2.1': dependencies: - '@tauri-apps/api': 2.5.0 + '@tauri-apps/api': 2.6.0 '@testing-library/dom@10.4.0': dependencies: diff --git a/sidecar/src/api/environment.ts b/sidecar/src/api/environment.ts index 0551a40a..c4ccfaa8 100644 --- a/sidecar/src/api/environment.ts +++ b/sidecar/src/api/environment.ts @@ -5,6 +5,7 @@ import type { Application } from './types'; import { config } from '../config'; import { browserExtensionState, aiContext } from '../state'; import { invokeCommand } from './rpc'; +import { runAppleScriptShim, translatePath } from './shims'; const supportPath = config.supportDir; try { @@ -92,3 +93,13 @@ export async function trash(path: fs.PathLike | fs.PathLike[]): Promise { const paths = (Array.isArray(path) ? path : [path]).map((p) => p.toString()); return invokeCommand('trash', { paths }); } + +/** + * Shim for runAppleScript - provides Linux compatibility + * @param script - AppleScript code to execute + * @returns Output from the script execution + */ +export async function runAppleScript(script: string): Promise { + return runAppleScriptShim(script); +} + diff --git a/sidecar/src/api/index.ts b/sidecar/src/api/index.ts index 2afb17eb..aca35288 100644 --- a/sidecar/src/api/index.ts +++ b/sidecar/src/api/index.ts @@ -20,6 +20,7 @@ import { getFrontmostApplication, showInFinder, trash, + runAppleScript, AI as AIConstant } from './environment'; import { preferencesStore } from '../preferences'; @@ -31,6 +32,7 @@ import * as OAuth from './oauth'; import { AI } from './ai'; import { Keyboard } from './keyboard'; import { currentPluginName, currentPluginPreferences } from '../state'; +import { writeOutput } from '../io'; const Image = { Mask: { @@ -79,13 +81,61 @@ export const getRaycastApi = () => { showToast, showHUD, trash, + runAppleScript, + closeMainWindow: async () => { + // Send message to frontend to hide the main window + writeOutput({ + type: 'close-main-window', + payload: {} + }); + }, + popToRoot: async () => { + // Navigate back to plugin list - extensions handle this themselves + // by completing execution which triggers go-back-to-plugin-list + }, useNavigation, usePersistentState: ( key: string, initialValue: T ): [T, React.Dispatch>, boolean] => { - const [state, setState] = React.useState(initialValue); - return [state, setState, false]; + const [state, setState] = React.useState(initialValue); + const [isLoading, setIsLoading] = React.useState(true); + + // Load persisted value on mount + React.useEffect(() => { + LocalStorage.getItem(key) + .then((stored) => { + if (stored !== undefined) { + try { + setState(JSON.parse(stored)); + } catch (e) { + console.error(`Failed to parse persisted state for key "${key}":`, e); + } + } + }) + .catch((e) => { + console.error(`Failed to load persisted state for key "${key}":`, e); + }) + .finally(() => { + setIsLoading(false); + }); + }, [key]); + + // Wrapper that persists to LocalStorage on every state change + const setPersistentState = React.useCallback( + (value: React.SetStateAction) => { + setState((prev) => { + const nextValue = typeof value === 'function' ? (value as (prev: T) => T)(prev) : value; + LocalStorage.setItem(key, JSON.stringify(nextValue)).catch((e) => { + console.error(`Failed to persist state for key "${key}":`, e); + }); + return nextValue; + }); + }, + [key] + ); + + return [state, setPersistentState, isLoading]; }, BrowserExtension: BrowserExtensionAPI, Keyboard diff --git a/sidecar/src/api/shims.ts b/sidecar/src/api/shims.ts new file mode 100644 index 00000000..1f5efe87 --- /dev/null +++ b/sidecar/src/api/shims.ts @@ -0,0 +1,78 @@ +import { invokeCommand } from './rpc'; + +/** + * Extension shims for macOS API compatibility on Linux + * This module provides Linux equivalents for macOS-specific APIs + */ + +export interface ShimResult { + success: boolean; + output?: string; + error?: string; +} + +/** + * Translates macOS paths to Linux equivalents + * @param path - macOS path to translate + * @returns Translated Linux path + */ +export async function translatePath(path: string): Promise { + return invokeCommand('shim_translate_path', { path }); +} + +/** + * Attempts to execute AppleScript by translating to Linux equivalents + * @param script - AppleScript code to execute + * @returns Result of the shim execution + */ +export async function runAppleScript(script: string): Promise { + return invokeCommand('shim_run_applescript', { script }); +} + +/** + * Gets system information in a cross-platform way + * @returns System information map + */ +export async function getSystemInfo(): Promise> { + return invokeCommand>('shim_get_system_info', {}); +} + +/** + * Normalizes file paths in extension code + * Replaces macOS-specific paths with Linux equivalents + * @param code - Extension code to normalize + * @returns Normalized code + */ +export function normalizePathsInCode(code: string): string { + let normalized = code; + + // Replace common macOS path patterns + const pathReplacements: Array<[RegExp, string]> = [ + [/\/Applications\//g, '/usr/share/applications/'], + [/\/Library\//g, '/usr/lib/'], + [/\/Users\//g, '/home/'], + [/~\/Library\/Application Support\//g, '~/.local/share/'], + [/~\/Library\/Preferences\//g, '~/.config/'], + [/~\/Library\//g, '~/.local/lib/'] + ]; + + for (const [pattern, replacement] of pathReplacements) { + normalized = normalized.replace(pattern, replacement); + } + + return normalized; +} + +/** + * Shim for Raycast's runAppleScript API + * This function should be injected into the extension environment + */ +export async function runAppleScriptShim(script: string): Promise { + const result = await runAppleScript(script); + + if (!result.success) { + throw new Error(result.error || 'AppleScript execution failed'); + } + + return result.output || ''; +} diff --git a/sidecar/src/api/systemCommands.ts b/sidecar/src/api/systemCommands.ts new file mode 100644 index 00000000..cb342bac --- /dev/null +++ b/sidecar/src/api/systemCommands.ts @@ -0,0 +1,96 @@ +import { invoke } from '@tauri-apps/api/core'; + +export type PowerCommand = 'shutdown' | 'restart' | 'sleep' | 'lock'; + +export interface VolumeLevel { + percentage: number; + isMuted: boolean; +} + +/** + * Execute a power management command + */ +export async function executePowerCommand(command: PowerCommand): Promise { + const normalizedCommand = command.charAt(0).toUpperCase() + command.slice(1); + await invoke('execute_power_command', { command: normalizedCommand }); +} + +/** + * Shut down the system + */ +export async function shutdown(): Promise { + await executePowerCommand('shutdown'); +} + +/** + * Restart the system + */ +export async function restart(): Promise { + await executePowerCommand('restart'); +} + +/** + * Put the system to sleep + */ +export async function sleep(): Promise { + await executePowerCommand('sleep'); +} + +/** + * Lock the screen + */ +export async function lockScreen(): Promise { + await executePowerCommand('lock'); +} + +/** + * Set system volume (0-100%) + */ +export async function setVolume(level: number): Promise { + const clampedLevel = Math.max(0, Math.min(100, level)); + await invoke('set_volume', { level: clampedLevel }); +} + +/** + * Increase volume by 5% + */ +export async function volumeUp(): Promise { + await invoke('volume_up'); +} + +/** + * Decrease volume by 5% + */ +export async function volumeDown(): Promise { + await invoke('volume_down'); +} + +/** + * Toggle mute + */ +export async function toggleMute(): Promise { + await invoke('toggle_mute'); +} + +/** + * Get current volume level and mute status + */ +export async function getVolume(): Promise { + return await invoke('get_volume'); +} + +/** + * Empty the trash + * @returns Number of items removed + */ +export async function emptyTrash(): Promise { + return await invoke('empty_trash'); +} + +/** + * Eject a drive + * @param device Device path (e.g., /dev/sdb1) + */ +export async function ejectDrive(device: string): Promise { + await invoke('eject_drive', { device }); +} diff --git a/sidecar/src/hostConfig.ts b/sidecar/src/hostConfig.ts index 6425f6d4..adb1c5e8 100644 --- a/sidecar/src/hostConfig.ts +++ b/sidecar/src/hostConfig.ts @@ -360,54 +360,59 @@ export const hostConfig: HostConfig< supportsPersistence: false, supportsHydration: false, - detachDeletedInstance() {}, - commitMount() {}, - hideInstance() {}, - hideTextInstance() {}, - unhideInstance() {}, - unhideTextInstance() {}, - resetTextContent() {}, - preparePortalMount() {}, + detachDeletedInstance() { }, + commitMount() { }, + hideInstance() { }, + hideTextInstance() { }, + unhideInstance() { }, + unhideTextInstance() { }, + resetTextContent() { }, + preparePortalMount() { }, getCurrentUpdatePriority: () => 1, getInstanceFromNode: () => null, - beforeActiveInstanceBlur: () => {}, - afterActiveInstanceBlur: () => {}, - prepareScopeUpdate() {}, + beforeActiveInstanceBlur: () => { }, + afterActiveInstanceBlur: () => { }, + prepareScopeUpdate() { }, getInstanceFromScope: () => null, - setCurrentUpdatePriority() {}, + setCurrentUpdatePriority() { }, resolveUpdatePriority: () => 1, maySuspendCommit: () => false, NotPendingTransition: null, HostTransitionContext: React.createContext(null) as unknown as ReactContext, resetFormInstance: function (): void { - throw new Error('Function not implemented.'); + // No-op: Not needed for our custom renderer }, requestPostPaintCallback: function (): void { - throw new Error('Function not implemented.'); + // No-op: Not needed for our custom renderer }, shouldAttemptEagerTransition: function (): boolean { - throw new Error('Function not implemented.'); + // No-op: Return false to disable eager transitions + return false; }, trackSchedulerEvent: function (): void { - throw new Error('Function not implemented.'); + // No-op: Scheduler tracking not needed }, resolveEventType: function (): null | string { - throw new Error('Function not implemented.'); + // No-op: Return null for no event type + return null; }, resolveEventTimeStamp: function (): number { - throw new Error('Function not implemented.'); + // No-op: Return current time + return Date.now(); }, preloadInstance: function (): boolean { - throw new Error('Function not implemented.'); + // No-op: Return false, no preloading needed + return false; }, startSuspendingCommit: function (): void { - throw new Error('Function not implemented.'); + // No-op: Suspense not supported in our renderer }, suspendInstance: function (): void { - throw new Error('Function not implemented.'); + // No-op: Suspense not supported in our renderer }, waitForCommitToBeReady: function () { - throw new Error('Function not implemented.'); + // No-op: Return null, commit is always ready + return null; } }; diff --git a/src-tauri/Cargo.lock b/src-tauri/Cargo.lock index 9f841ca6..b02445b7 100644 --- a/src-tauri/Cargo.lock +++ b/src-tauri/Cargo.lock @@ -123,6 +123,56 @@ dependencies = [ "libc", ] +[[package]] +name = "anstream" +version = "0.6.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43d5b281e737544384e969a5ccad3f1cdd24b48086a0fc1b2a5262a26b8f4f4a" +dependencies = [ + "anstyle", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", + "colorchoice", + "is_terminal_polyfill", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "1.0.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78" + +[[package]] +name = "anstyle-parse" +version = "0.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e7644824f0aa2c7b9384579234ef10eb7efb6a0deb83f9630a49594dd9c15c2" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-query" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40c48f72fd53cd289104fc64099abca73db4166ad86ea0b4341abe65af83dadc" +dependencies = [ + "windows-sys 0.60.2", +] + +[[package]] +name = "anstyle-wincon" +version = "3.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "291e6a250ff86cd4a820112fb8898808a366d8f9f58ce16d1f538353ad55747d" +dependencies = [ + "anstyle", + "once_cell_polyfill", + "windows-sys 0.60.2", +] + [[package]] name = "anyhow" version = "1.0.98" @@ -176,6 +226,12 @@ version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" +[[package]] +name = "as-raw-xcb-connection" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "175571dd1d178ced59193a6fc02dde1b972eb0bc56c892cde9beeceac5bf0f6b" + [[package]] name = "ashpd" version = "0.11.0" @@ -763,6 +819,46 @@ dependencies = [ "inout", ] +[[package]] +name = "clap" +version = "4.5.53" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9e340e012a1bf4935f5282ed1436d1489548e8f72308207ea5df0e23d2d03f8" +dependencies = [ + "clap_builder", + "clap_derive", +] + +[[package]] +name = "clap_builder" +version = "4.5.53" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d76b5d13eaa18c901fd2f7fca939fefe3a0727a953561fefdf3b2922b8569d00" +dependencies = [ + "anstream", + "anstyle", + "clap_lex", + "strsim", +] + +[[package]] +name = "clap_derive" +version = "4.5.49" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a0b5487afeab2deb2ff4e03a807ad1a03ac532ff5a2cee5d86884440c7f7671" +dependencies = [ + "heck 0.5.0", + "proc-macro2", + "quote", + "syn 2.0.103", +] + +[[package]] +name = "clap_lex" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1d728cc89cf3aee9ff92b05e62b19ee65a02b5702cff7d5a377e32c6ae29d8d" + [[package]] name = "clipboard-win" version = "5.4.0" @@ -823,6 +919,12 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3d7b894f5411737b7867f4827955924d7c254fc9f4d91a6aad6b097804b1018b" +[[package]] +name = "colorchoice" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75" + [[package]] name = "combine" version = "4.6.7" @@ -1697,7 +1799,7 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" [[package]] name = "flare" -version = "0.1.0" +version = "0.1.1" dependencies = [ "aes-gcm", "anyhow", @@ -1705,8 +1807,11 @@ dependencies = [ "bincode", "bytes", "chrono", + "clap", + "dirs 5.0.1", "enigo 0.5.0", "evdev", + "flate2", "freedesktop-file-parser", "futures-util", "hex", @@ -1727,6 +1832,8 @@ dependencies = [ "serde", "serde_json", "sha2", + "sysinfo", + "tar", "tauri", "tauri-build", "tauri-plugin-clipboard-manager", @@ -1741,10 +1848,14 @@ dependencies = [ "tauri-plugin-single-instance", "tokio", "tokio-tungstenite", + "tracing", + "tracing-subscriber", "trash", "url", + "urlencoding", "uuid", "walkdir", + "x11rb", "xkbcommon 0.8.0", "zbus", "zip", @@ -2866,6 +2977,12 @@ dependencies = [ "once_cell", ] +[[package]] +name = "is_terminal_polyfill" +version = "1.70.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695" + [[package]] name = "itertools" version = "0.12.1" @@ -3249,6 +3366,15 @@ dependencies = [ "syn 2.0.103", ] +[[package]] +name = "matchers" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1525a2a28c7f4fa0fc98bb91ae755d1e2d1505079e05539e35bc876b5d65ae9" +dependencies = [ + "regex-automata", +] + [[package]] name = "matches" version = "0.1.10" @@ -3518,6 +3644,24 @@ dependencies = [ "walkdir", ] +[[package]] +name = "ntapi" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c70f219e21142367c70c0b30c6a9e3a14d55b4d12a204d897fbec83a0363f081" +dependencies = [ + "winapi", +] + +[[package]] +name = "nu-ansi-term" +version = "0.50.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5" +dependencies = [ + "windows-sys 0.60.2", +] + [[package]] name = "num-bigint" version = "0.4.6" @@ -3833,6 +3977,12 @@ version = "1.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" +[[package]] +name = "once_cell_polyfill" +version = "1.70.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "384b8ab6d37215f3c5301a95a4accb5d64aa607f1fcb26a11b5303878451b4fe" + [[package]] name = "opaque-debug" version = "0.3.1" @@ -5292,6 +5442,15 @@ dependencies = [ "digest", ] +[[package]] +name = "sharded-slab" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +dependencies = [ + "lazy_static", +] + [[package]] name = "shared_child" version = "1.1.0" @@ -5547,6 +5706,20 @@ dependencies = [ "libc", ] +[[package]] +name = "sysinfo" +version = "0.32.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c33cd241af0f2e9e3b5c32163b873b29956890b5342e6745b917ce9d490f4af" +dependencies = [ + "core-foundation-sys 0.8.7", + "libc", + "memchr", + "ntapi", + "rayon", + "windows 0.56.0", +] + [[package]] name = "system-configuration" version = "0.6.1" @@ -5637,6 +5810,17 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" +[[package]] +name = "tar" +version = "0.4.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d863878d212c87a19c1a610eb53bb01fe12951c0501cf5a0d65f724914a667a" +dependencies = [ + "filetime", + "libc", + "xattr", +] + [[package]] name = "target-lexicon" version = "0.12.16" @@ -6126,6 +6310,15 @@ dependencies = [ "syn 2.0.103", ] +[[package]] +name = "thread_local" +version = "1.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185" +dependencies = [ + "cfg-if", +] + [[package]] name = "tiff" version = "0.9.1" @@ -6414,6 +6607,36 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678" dependencies = [ "once_cell", + "valuable", +] + +[[package]] +name = "tracing-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +dependencies = [ + "log", + "once_cell", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2054a14f5307d601f88daf0553e1cbf472acc4f2c51afab632431cdcd72124d5" +dependencies = [ + "matchers", + "nu-ansi-term", + "once_cell", + "regex-automata", + "sharded-slab", + "smallvec", + "thread_local", + "tracing", + "tracing-core", + "tracing-log", ] [[package]] @@ -6638,6 +6861,12 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" +[[package]] +name = "utf8parse" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" + [[package]] name = "uuid" version = "1.17.0" @@ -6661,6 +6890,12 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "valuable" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" + [[package]] name = "vcpkg" version = "0.2.15" @@ -7728,7 +7963,9 @@ version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5d91ffca73ee7f68ce055750bf9f6eca0780b8c85eff9bc046a3b0da41755e12" dependencies = [ + "as-raw-xcb-connection", "gethostname 0.4.3", + "libc", "rustix 0.38.44", "x11rb-protocol", ] @@ -7739,6 +7976,16 @@ version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec107c4503ea0b4a98ef47356329af139c0a4f7750e621cf2973cd3385ebcb3d" +[[package]] +name = "xattr" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32e45ad4206f6d2479085147f02bc2ef834ac85886624a23575ae137c8aa8156" +dependencies = [ + "libc", + "rustix 1.0.7", +] + [[package]] name = "xdg" version = "2.5.2" diff --git a/src-tauri/Cargo.toml b/src-tauri/Cargo.toml index 55a7a3d2..b61d72ce 100644 --- a/src-tauri/Cargo.toml +++ b/src-tauri/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "flare" -version = "0.1.0" +version = "0.1.1" description = "A Tauri App" authors = ["you"] edition = "2021" @@ -18,7 +18,7 @@ crate-type = ["staticlib", "cdylib", "rlib"] tauri-build = { version = "2", features = [] } [dependencies] -tauri = { version = "2", features = ["protocol-asset"] } +tauri = { version = "2", features = ["protocol-asset", "tray-icon"] } tauri-plugin-opener = "2" tauri-plugin-deep-link = "2" serde = { version = "1", features = ["derive"] } @@ -64,6 +64,15 @@ notify = "6.1.1" notify-debouncer-full = "0.3.1" percent-encoding = "2.3.1" tauri-plugin-os = "2" +dirs = "5.0" +sysinfo = "0.32" +urlencoding = "2.1" +flate2 = "1.0" +tar = "0.4" +x11rb = { version = "0.13", features = ["allow-unsafe-code"] } +tracing = "0.1" +tracing-subscriber = { version = "0.3", features = ["env-filter"] } +clap = { version = "4", features = ["derive"] } [target.'cfg(not(any(target_os = "android", target_os = "ios")))'.dependencies] tauri-plugin-global-shortcut = "2" @@ -80,4 +89,11 @@ codegen-units = 1 lto = true opt-level = "s" # rust is not the bottleneck, we can optimize for size with a small performance hit panic = "abort" -strip = true +strip = "symbols" # Keep debuginfo symbols (including Tauri metadata) but strip symbol table + +# Faster builds for development/testing (use with: cargo build --profile release-fast) +[profile.release-fast] +inherits = "release" +lto = false # Disable LTO for much faster linking +codegen-units = 16 # Enable parallel code generation +strip = "symbols" diff --git a/src-tauri/runner.sh b/src-tauri/runner.sh new file mode 100755 index 00000000..a998e831 --- /dev/null +++ b/src-tauri/runner.sh @@ -0,0 +1,4 @@ +#!/bin/bash +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +export LD_LIBRARY_PATH="$SCRIPT_DIR/SoulverWrapper/.build/release:$SCRIPT_DIR/SoulverWrapper/Vendor/SoulverCore-linux" +exec "$@" diff --git a/src-tauri/src/ai.rs b/src-tauri/src/ai.rs index 75e1e2cb..6d8b8670 100644 --- a/src-tauri/src/ai.rs +++ b/src-tauri/src/ai.rs @@ -23,6 +23,15 @@ const AI_USAGE_SCHEMA: &str = "CREATE TABLE IF NOT EXISTS ai_generations ( total_cost REAL NOT NULL )"; +const AI_CONVERSATIONS_SCHEMA: &str = "CREATE TABLE IF NOT EXISTS ai_conversations ( + id TEXT PRIMARY KEY, + title TEXT NOT NULL, + created_at INTEGER NOT NULL, + updated_at INTEGER NOT NULL, + model TEXT, + messages TEXT NOT NULL +)"; + #[derive(Deserialize)] #[serde(rename_all = "camelCase")] pub struct AskOptions { @@ -44,6 +53,24 @@ pub struct StreamEnd { full_text: String, } +#[derive(Serialize, Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct Message { + pub role: String, + pub content: String, +} + +#[derive(Serialize, Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct Conversation { + pub id: String, + pub title: String, + pub created_at: i64, + pub updated_at: i64, + pub model: Option, + pub messages: Vec, +} + #[derive(Serialize, Deserialize, Debug, Clone)] pub struct GenerationData { pub id: String, @@ -135,13 +162,48 @@ static DEFAULT_AI_MODELS: Lazy> = Lazy::new( m }); -#[derive(Serialize, Deserialize, Default, Debug, Clone)] +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] +#[serde(rename_all = "camelCase")] +pub enum AiProvider { + OpenRouter, + Ollama, +} + +impl Default for AiProvider { + fn default() -> Self { + AiProvider::OpenRouter + } +} + +#[derive(Serialize, Deserialize, Debug, Clone)] #[serde(rename_all = "camelCase")] pub struct AiSettings { enabled: bool, + #[serde(default)] + provider: AiProvider, + #[serde(default)] + base_url: Option, + #[serde(default = "default_temperature")] + temperature: f64, model_associations: HashMap, } +impl Default for AiSettings { + fn default() -> Self { + Self { + enabled: false, + provider: AiProvider::default(), + base_url: None, + temperature: default_temperature(), + model_associations: HashMap::new(), + } + } +} + +fn default_temperature() -> f64 { + 0.7 +} + fn get_settings_path(app: &tauri::AppHandle) -> Result { let data_dir = app .path() @@ -195,6 +257,9 @@ pub fn set_ai_settings(app: tauri::AppHandle, settings: AiSettings) -> Result<() let mut settings_to_save = AiSettings { enabled: settings.enabled, + provider: settings.provider, + base_url: settings.base_url, + temperature: settings.temperature, model_associations: HashMap::new(), }; @@ -255,6 +320,18 @@ impl AiUsageManager { pub fn new(app_handle: &AppHandle) -> Result { let store = Store::new(app_handle, "ai_usage.sqlite")?; store.init_table(AI_USAGE_SCHEMA)?; + store.init_table(AI_CONVERSATIONS_SCHEMA)?; + + // Add indices for performance + store.execute( + "CREATE INDEX IF NOT EXISTS idx_ai_generations_created ON ai_generations(created)", + params![], + )?; + store.execute( + "CREATE INDEX IF NOT EXISTS idx_ai_conversations_updated ON ai_conversations(updated_at)", + params![], + )?; + Ok(Self { store }) } @@ -331,6 +408,187 @@ async fn fetch_and_log_usage( Ok(()) } +#[tauri::command] +pub async fn get_ollama_models(base_url: String) -> Result, String> { + let client = reqwest::Client::new(); + let base = if base_url.trim().is_empty() { + "http://localhost:11434/v1".to_string() + } else { + base_url + }; + let url = format!("{}/models", base.trim_end_matches('/')); + + let res = client.get(&url).send().await.map_err(|e| e.to_string())?; + + if !res.status().is_success() { + return Err(format!("Failed to fetch models: {}", res.status())); + } + + let json: Value = res.json().await.map_err(|e| e.to_string())?; + + // Ollama's /v1/models returns an object with a "data" array of models + // Each model has an "id" field in the OpenAI-compatible API + let mut model_ids = Vec::new(); + if let Some(data) = json.get("data").and_then(|d| d.as_array()) { + for model in data { + if let Some(id) = model.get("id").and_then(|id| id.as_str()) { + model_ids.push(id.to_string()); + } + } + } else { + // Fallback for Ollama's native API /api/tags if /v1/models fails or is different + // But since we are using /v1 base_url, /v1/models is preferred + return Err("Unexpected response format from Ollama models API".to_string()); + } + + Ok(model_ids) +} + +// Conversation Management Commands + +#[tauri::command] +pub fn create_conversation( + app_handle: AppHandle, + title: String, + model: Option, +) -> Result { + let usage_manager = app_handle.state::(); + let id = uuid::Uuid::new_v4().to_string(); + let now = chrono::Utc::now().timestamp(); + + let conversation = Conversation { + id: id.clone(), + title, + created_at: now, + updated_at: now, + model, + messages: Vec::new(), + }; + + let messages_json = serde_json::to_string(&conversation.messages).map_err(|e| e.to_string())?; + + usage_manager.store.execute( + "INSERT INTO ai_conversations (id, title, created_at, updated_at, model, messages) VALUES (?1, ?2, ?3, ?4, ?5, ?6)", + params![ + conversation.id, + conversation.title, + conversation.created_at, + conversation.updated_at, + conversation.model, + messages_json + ], + ).map_err(|e| e.to_string())?; + + Ok(conversation) +} + +#[tauri::command] +pub fn list_conversations(app_handle: AppHandle) -> Result, String> { + let usage_manager = app_handle.state::(); + + let conn = usage_manager.store.conn(); + let mut stmt = conn + .prepare("SELECT id, title, created_at, updated_at, model, messages FROM ai_conversations ORDER BY updated_at DESC") + .map_err(|e| e.to_string())?; + + let conversations = stmt + .query_map([], |row| { + let messages_json: String = row.get(5)?; + let messages: Vec = + serde_json::from_str(&messages_json).unwrap_or_else(|_| Vec::new()); + + Ok(Conversation { + id: row.get(0)?, + title: row.get(1)?, + created_at: row.get(2)?, + updated_at: row.get(3)?, + model: row.get(4)?, + messages, + }) + }) + .map_err(|e| e.to_string())? + .collect::, _>>() + .map_err(|e| e.to_string())?; + + Ok(conversations) +} + +#[tauri::command] +pub fn get_conversation(app_handle: AppHandle, id: String) -> Result, String> { + let usage_manager = app_handle.state::(); + + let conn = usage_manager.store.conn(); + let mut stmt = conn + .prepare("SELECT id, title, created_at, updated_at, model, messages FROM ai_conversations WHERE id = ?1") + .map_err(|e| e.to_string())?; + + let result = stmt.query_row([id], |row| { + let messages_json: String = row.get(5)?; + let messages: Vec = + serde_json::from_str(&messages_json).unwrap_or_else(|_| Vec::new()); + + Ok(Conversation { + id: row.get(0)?, + title: row.get(1)?, + created_at: row.get(2)?, + updated_at: row.get(3)?, + model: row.get(4)?, + messages, + }) + }); + + match result { + Ok(conv) => Ok(Some(conv)), + Err(rusqlite::Error::QueryReturnedNoRows) => Ok(None), + Err(e) => Err(e.to_string()), + } +} + +#[tauri::command] +pub fn update_conversation( + app_handle: AppHandle, + id: String, + title: Option, + messages: Option>, +) -> Result<(), String> { + let usage_manager = app_handle.state::(); + let now = chrono::Utc::now().timestamp(); + + if let Some(msgs) = messages { + let messages_json = serde_json::to_string(&msgs).map_err(|e| e.to_string())?; + usage_manager + .store + .execute( + "UPDATE ai_conversations SET messages = ?1, updated_at = ?2 WHERE id = ?3", + params![messages_json, now, id], + ) + .map_err(|e| e.to_string())?; + } + + if let Some(t) = title { + usage_manager + .store + .execute( + "UPDATE ai_conversations SET title = ?1, updated_at = ?2 WHERE id = ?3", + params![t, now, id], + ) + .map_err(|e| e.to_string())?; + } + + Ok(()) +} + +#[tauri::command] +pub fn delete_conversation(app_handle: AppHandle, id: String) -> Result<(), String> { + let usage_manager = app_handle.state::(); + + usage_manager + .store + .execute("DELETE FROM ai_conversations WHERE id = ?1", params![id]) + .map(|_| ()) + .map_err(|e| e.to_string()) +} + #[tauri::command] pub async fn ai_ask_stream( app_handle: AppHandle, @@ -343,11 +601,14 @@ pub async fn ai_ask_stream( return Err("AI features are not enabled.".to_string()); } - let api_key = + let api_key = if settings.provider == AiProvider::OpenRouter { match get_keyring_entry().and_then(|entry| entry.get_password().map_err(AppError::from)) { Ok(key) => key, Err(e) => return Err(e.to_string()), - }; + } + } else { + String::new() // Ollama doesn't need an API key + }; let model_key = options.model.unwrap_or_else(|| "default".to_string()); @@ -355,14 +616,18 @@ pub async fn ai_ask_stream( .model_associations .get(&model_key) .cloned() - .unwrap_or_else(|| "mistralai/mistral-7b-instruct:free".to_string()); + .unwrap_or_else(|| match settings.provider { + AiProvider::OpenRouter => "mistralai/mistral-7b-instruct:free".to_string(), + AiProvider::Ollama => "llama3".to_string(), + }); + // Use configured temperature, allow creativity parameter to override if provided let temperature = match options.creativity.as_deref() { Some("none") => 0.0, Some("low") => 0.4, Some("medium") => 0.7, Some("high") => 1.0, - _ => 0.7, + _ => settings.temperature, }; let body = serde_json::json!({ @@ -372,15 +637,32 @@ pub async fn ai_ask_stream( "temperature": temperature, }); + let (api_url, auth_header) = match settings.provider { + AiProvider::OpenRouter => ( + "https://openrouter.ai/api/v1/chat/completions".to_string(), + Some(format!("Bearer {}", api_key)), + ), + AiProvider::Ollama => { + let base = settings + .base_url + .filter(|s| !s.trim().is_empty()) + .unwrap_or_else(|| "http://localhost:11434/v1".to_string()); + ( + format!("{}/chat/completions", base.trim_end_matches('/')), + None, + ) + } + }; + let client = reqwest::Client::new(); - let res = client - .post("https://openrouter.ai/api/v1/chat/completions") - .header("Authorization", format!("Bearer {}", api_key)) - .header("HTTP-Referer", "http://localhost") - .json(&body) - .send() - .await - .map_err(|e| e.to_string())?; + let mut request = client.post(&api_url).json(&body); + + if let Some(auth) = auth_header { + request = request.header("Authorization", auth); + request = request.header("HTTP-Referer", "http://localhost"); + } + + let res = request.send().await.map_err(|e| e.to_string())?; let open_router_request_id = res .headers() @@ -440,13 +722,15 @@ pub async fn ai_ask_stream( ) .map_err(|e| e.to_string())?; - if let Some(or_req_id) = open_router_request_id { - let handle_clone = app_handle.clone(); - tokio::spawn(async move { - if let Err(e) = fetch_and_log_usage(or_req_id, api_key, handle_clone).await { - eprintln!("[AI Usage Tracking] Error: {}", e); - } - }); + if settings.provider == AiProvider::OpenRouter { + if let Some(or_req_id) = open_router_request_id { + let handle_clone = app_handle.clone(); + tokio::spawn(async move { + if let Err(e) = fetch_and_log_usage(or_req_id, api_key, handle_clone).await { + tracing::error!(error = %e, "AI usage tracking failed"); + } + }); + } } Ok(()) diff --git a/src-tauri/src/ai_presets.rs b/src-tauri/src/ai_presets.rs new file mode 100644 index 00000000..3bf44f8a --- /dev/null +++ b/src-tauri/src/ai_presets.rs @@ -0,0 +1,157 @@ +use crate::error::AppError; +use crate::store::{Storable, Store}; +use rusqlite::params; +use serde::{Deserialize, Serialize}; +use std::sync::Arc; +use tauri::{AppHandle, State}; +use uuid::Uuid; + +const AI_PRESETS_SCHEMA: &str = "CREATE TABLE IF NOT EXISTS ai_presets ( + id TEXT PRIMARY KEY, + name TEXT NOT NULL, + template TEXT NOT NULL, + icon TEXT, + created_at INTEGER NOT NULL +)"; + +#[derive(Serialize, Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct AiPreset { + pub id: String, + pub name: String, + pub template: String, + pub icon: Option, + pub created_at: i64, +} + +impl Storable for AiPreset { + fn from_row(row: &rusqlite::Row) -> rusqlite::Result { + Ok(AiPreset { + id: row.get(0)?, + name: row.get(1)?, + template: row.get(2)?, + icon: row.get(3)?, + created_at: row.get(4)?, + }) + } +} + +pub struct AiPresetManager { + store: Arc, +} + +impl AiPresetManager { + pub fn new(app_handle: &AppHandle) -> Result { + let store = Arc::new(Store::new(app_handle, "ai_presets.sqlite")?); + store.init_table(AI_PRESETS_SCHEMA)?; + + // Seed default presets if empty + let count: i64 = store.conn().query_row( + "SELECT COUNT(*) FROM ai_presets", + [], + |row| row.get(0), + )?; + + if count == 0 { + let defaults = vec![ + ( + "Summarize Selection", + "Summarize the following text concisely:\n\n{selection}", + "text-align-left", + ), + ( + "Fix Spelling & Grammar", + "Fix the spelling and grammar in the following text. Output only the corrected text:\n\n{selection}", + "pencil-1", + ), + ( + "Explain Code", + "Explain the following code snippet:\n\n{selection}", + "code", + ), + ( + "Improve Writing", + "Rewrite the following text to be more clear and professional:\n\n{selection}", + "magic-wand", + ), + ]; + + for (name, template, icon) in defaults { + let id = Uuid::new_v4().to_string(); + let now = chrono::Utc::now().timestamp(); + store.execute( + "INSERT INTO ai_presets (id, name, template, icon, created_at) VALUES (?1, ?2, ?3, ?4, ?5)", + params![id, name, template, icon, now], + )?; + } + } + + Ok(Self { store }) + } + + pub fn get_all(&self) -> Result, AppError> { + self.store.query("SELECT id, name, template, icon, created_at FROM ai_presets ORDER BY name ASC", []) + } + + pub fn create(&self, name: String, template: String, icon: Option) -> Result { + let id = Uuid::new_v4().to_string(); + let now = chrono::Utc::now().timestamp(); + + self.store.execute( + "INSERT INTO ai_presets (id, name, template, icon, created_at) VALUES (?1, ?2, ?3, ?4, ?5)", + params![id, name, template, icon, now], + )?; + + Ok(AiPreset { + id, + name, + template, + icon, + created_at: now, + }) + } + + pub fn update(&self, id: String, name: String, template: String, icon: Option) -> Result<(), AppError> { + self.store.execute( + "UPDATE ai_presets SET name = ?1, template = ?2, icon = ?3 WHERE id = ?4", + params![name, template, icon, id], + )?; + Ok(()) + } + + pub fn delete(&self, id: String) -> Result<(), AppError> { + self.store.execute("DELETE FROM ai_presets WHERE id = ?1", params![id])?; + Ok(()) + } +} + +#[tauri::command] +pub fn get_ai_presets(manager: State) -> Result, String> { + manager.get_all().map_err(|e| e.to_string()) +} + +#[tauri::command] +pub fn create_ai_preset( + manager: State, + name: String, + template: String, + icon: Option, +) -> Result { + manager.create(name, template, icon).map_err(|e| e.to_string()) +} + +#[tauri::command] +pub fn update_ai_preset( + manager: State, + id: String, + name: String, + template: String, + icon: Option, +) -> Result<(), String> { + manager.update(id, name, template, icon).map_err(|e| e.to_string()) +} + +#[tauri::command] +pub fn delete_ai_preset(manager: State, id: String) -> Result<(), String> { + manager.delete(id).map_err(|e| e.to_string()) +} diff --git a/src-tauri/src/aliases.rs b/src-tauri/src/aliases.rs new file mode 100644 index 00000000..34de1854 --- /dev/null +++ b/src-tauri/src/aliases.rs @@ -0,0 +1,90 @@ +use crate::error::AppError; +use rusqlite::params; +use std::collections::HashMap; +use std::sync::{Arc, Mutex}; +use tauri::{AppHandle, Manager, State}; + +const ALIASES_SCHEMA: &str = "CREATE TABLE IF NOT EXISTS command_aliases ( + alias TEXT PRIMARY KEY, + command_id TEXT NOT NULL +)"; + +pub struct AliasManager { + store: Arc>, +} + +impl AliasManager { + pub fn new(app_handle: &AppHandle) -> Result { + let app_dir = app_handle + .path() + .app_local_data_dir() + .map_err(|_| AppError::DirectoryNotFound)?; + + if !app_dir.exists() { + std::fs::create_dir_all(&app_dir)?; + } + + let db_path = app_dir.join("aliases.db"); + let store = rusqlite::Connection::open(db_path)?; + + store.execute(ALIASES_SCHEMA, [])?; + + Ok(Self { + store: Arc::new(Mutex::new(store)), + }) + } + + pub fn get_all(&self) -> Result, AppError> { + let store = self.store.lock().expect("alias store mutex poisoned"); + let mut stmt = store.prepare("SELECT alias, command_id FROM command_aliases")?; + + let rows = stmt.query_map([], |row| { + Ok((row.get::<_, String>(0)?, row.get::<_, String>(1)?)) + })?; + + let mut aliases = HashMap::new(); + for row in rows { + let (alias, command_id) = row?; + aliases.insert(alias, command_id); + } + + Ok(aliases) + } + + pub fn set_alias(&self, alias: String, command_id: String) -> Result<(), AppError> { + let store = self.store.lock().expect("alias store mutex poisoned"); + store.execute( + "INSERT OR REPLACE INTO command_aliases (alias, command_id) VALUES (?1, ?2)", + params![alias, command_id], + )?; + Ok(()) + } + + pub fn remove_alias(&self, alias: String) -> Result<(), AppError> { + let store = self.store.lock().expect("alias store mutex poisoned"); + store.execute( + "DELETE FROM command_aliases WHERE alias = ?1", + params![alias], + )?; + Ok(()) + } +} + +#[tauri::command] +pub fn get_aliases(manager: State) -> Result, String> { + manager.get_all().map_err(|e| e.to_string()) +} + +#[tauri::command] +pub fn set_alias( + manager: State, + alias: String, + command_id: String, +) -> Result<(), String> { + manager.set_alias(alias, command_id).map_err(|e| e.to_string()) +} + +#[tauri::command] +pub fn remove_alias(manager: State, alias: String) -> Result<(), String> { + manager.remove_alias(alias).map_err(|e| e.to_string()) +} diff --git a/src-tauri/src/auto_start.rs b/src-tauri/src/auto_start.rs new file mode 100644 index 00000000..a175838a --- /dev/null +++ b/src-tauri/src/auto_start.rs @@ -0,0 +1,92 @@ +use std::fs; +use std::path::PathBuf; + +/// Enable or disable auto-start on login (Linux XDG standard) +pub fn set_auto_start(enabled: bool) -> Result<(), String> { + #[cfg(target_os = "linux")] + { + let autostart_dir = get_autostart_dir()?; + let desktop_file_path = autostart_dir.join("flareup.desktop"); + + if enabled { + // Create autostart directory if it doesn't exist + fs::create_dir_all(&autostart_dir) + .map_err(|e| format!("Failed to create autostart directory: {}", e))?; + + // Get the executable path + let exe_path = std::env::current_exe() + .map_err(|e| format!("Failed to get executable path: {}", e))?; + + // Create desktop entry content + let desktop_entry = format!( + "[Desktop Entry]\n\ + Type=Application\n\ + Name=Flareup\n\ + Comment=Raycast for Linux\n\ + Exec={}\n\ + Terminal=false\n\ + Categories=Utility;\n\ + X-GNOME-Autostart-enabled=true\n", + exe_path.display() + ); + + // Write the desktop file + fs::write(&desktop_file_path, desktop_entry) + .map_err(|e| format!("Failed to write autostart file: {}", e))?; + + tracing::info!("Auto-start enabled"); + Ok(()) + } else { + // Remove the desktop file if it exists + if desktop_file_path.exists() { + fs::remove_file(&desktop_file_path) + .map_err(|e| format!("Failed to remove autostart file: {}", e))?; + tracing::info!("Auto-start disabled"); + } + Ok(()) + } + } + + #[cfg(not(target_os = "linux"))] + { + Err("Auto-start is currently only supported on Linux".to_string()) + } +} + +/// Check if auto-start is currently enabled +pub fn is_auto_start_enabled() -> Result { + #[cfg(target_os = "linux")] + { + let autostart_dir = get_autostart_dir()?; + let desktop_file_path = autostart_dir.join("flareup.desktop"); + Ok(desktop_file_path.exists()) + } + + #[cfg(not(target_os = "linux"))] + { + Ok(false) + } +} + +#[cfg(target_os = "linux")] +fn get_autostart_dir() -> Result { + let home = + std::env::var("HOME").map_err(|_| "HOME environment variable not set".to_string())?; + + // Check XDG_CONFIG_HOME first, fallback to ~/.config + let config_home = + std::env::var("XDG_CONFIG_HOME").unwrap_or_else(|_| format!("{}/.config", home)); + + Ok(PathBuf::from(config_home).join("autostart")) +} + +// Tauri command +#[tauri::command] +pub fn set_auto_start_enabled(enabled: bool) -> Result<(), String> { + set_auto_start(enabled) +} + +#[tauri::command] +pub fn get_auto_start_enabled() -> Result { + is_auto_start_enabled() +} diff --git a/src-tauri/src/browser_extension.rs b/src-tauri/src/browser_extension.rs index 29d0dad1..8ebdee75 100644 --- a/src-tauri/src/browser_extension.rs +++ b/src-tauri/src/browser_extension.rs @@ -9,6 +9,7 @@ use tokio::sync::oneshot; use tokio_tungstenite::tungstenite::Message; #[derive(Serialize, Deserialize)] +#[allow(dead_code)] struct JsonRpcRequest { jsonrpc: String, method: String, @@ -17,6 +18,7 @@ struct JsonRpcRequest { } #[derive(Serialize, Deserialize, Debug)] +#[allow(dead_code)] struct JsonRpcResponse { jsonrpc: String, #[serde(skip_serializing_if = "Option::is_none")] @@ -27,6 +29,7 @@ struct JsonRpcResponse { } #[derive(Serialize, Deserialize, Debug)] +#[allow(dead_code)] struct JsonRpcError { code: i32, message: String, @@ -77,17 +80,20 @@ async fn handle_connection(stream: TcpStream, app_handle: AppHandle) { let ws_stream = match tokio_tungstenite::accept_async(stream).await { Ok(ws) => ws, Err(e) => { - eprintln!("WebSocket handshake error: {}", e); + tracing::warn!(error = %e, "WebSocket handshake error"); return; } }; - *state.is_connected.lock().unwrap() = true; - println!("Browser extension connected."); + *state + .is_connected + .lock() + .expect("is_connected mutex poisoned") = true; + tracing::info!("Browser extension connected"); let (mut ws_sender, mut ws_receiver) = ws_stream.split(); let (tx, mut rx) = tokio::sync::mpsc::channel::(100); - *state.connection.lock().unwrap() = Some(tx); + *state.connection.lock().expect("connection mutex poisoned") = Some(tx); let sender_task = tokio::spawn(async move { while let Some(msg_to_send) = rx.recv().await { @@ -122,7 +128,7 @@ async fn handle_connection(stream: TcpStream, app_handle: AppHandle) { .state::() .connection .lock() - .unwrap() + .expect("connection mutex poisoned") .clone(); if let Some(tx) = tx { let _ = tx.send(response.to_string()).await; @@ -134,7 +140,7 @@ async fn handle_connection(stream: TcpStream, app_handle: AppHandle) { .state::() .pending_requests .lock() - .unwrap() + .expect("pending_requests mutex poisoned") .remove(&id); if let Some(sender) = sender { if !error.is_null() { @@ -145,10 +151,10 @@ async fn handle_connection(stream: TcpStream, app_handle: AppHandle) { } } Ok(IncomingMessage::Notification { method, params }) => { - println!("Received notification: {} with params {:?}", method, params); + tracing::debug!(method = %method, ?params, "Received notification"); } Err(e) => { - eprintln!("Failed to parse message from browser extension: {}", e); + tracing::warn!(error = %e, "Failed to parse message from browser extension"); } } } @@ -160,15 +166,25 @@ async fn handle_connection(stream: TcpStream, app_handle: AppHandle) { _ = receiver_task => {}, } - *state.is_connected.lock().unwrap() = false; - *state.connection.lock().unwrap() = None; - println!("Browser extension disconnected."); + *state + .is_connected + .lock() + .expect("is_connected mutex poisoned") = false; + *state.connection.lock().expect("connection mutex poisoned") = None; + tracing::info!("Browser extension disconnected"); } pub async fn run_server(app_handle: AppHandle) { let addr = "127.0.0.1:7265"; - let listener = TcpListener::bind(&addr).await.expect("Failed to bind"); - println!("WebSocket server listening on ws://{}", addr); + let listener = match TcpListener::bind(&addr).await { + Ok(l) => l, + Err(e) => { + tracing::error!("Failed to bind browser extension port {}: {}", addr, e); + tracing::warn!("Browser extension WebSocket server will not be available"); + return; + } + }; + tracing::info!("WebSocket server listening on ws://{}", addr); while let Ok((stream, _)) = listener.accept().await { tokio::spawn(handle_connection(stream, app_handle.clone())); @@ -179,7 +195,10 @@ pub async fn run_server(app_handle: AppHandle) { pub async fn browser_extension_check_connection( state: tauri::State<'_, WsState>, ) -> Result { - Ok(*state.is_connected.lock().unwrap()) + Ok(*state + .is_connected + .lock() + .expect("is_connected mutex poisoned")) } #[tauri::command] @@ -191,13 +210,16 @@ pub async fn browser_extension_request( use std::time::Duration; let tx = { - let lock = state.connection.lock().unwrap(); + let lock = state.connection.lock().expect("connection mutex poisoned"); lock.clone() }; if let Some(tx) = tx { let request_id = { - let mut counter = state.request_id_counter.lock().unwrap(); + let mut counter = state + .request_id_counter + .lock() + .expect("request_id_counter mutex poisoned"); *counter += 1; *counter }; @@ -213,7 +235,7 @@ pub async fn browser_extension_request( state .pending_requests .lock() - .unwrap() + .expect("pending_requests mutex poisoned") .insert(request_id, response_tx); if tx.send(request.to_string()).await.is_err() { diff --git a/src-tauri/src/cache.rs b/src-tauri/src/cache.rs index 4aeb580b..13e37d6f 100644 --- a/src-tauri/src/cache.rs +++ b/src-tauri/src/cache.rs @@ -73,7 +73,7 @@ impl AppCache { if let Ok(cache_path) = Self::get_cache_path(app) { if let Err(e) = cache_data.write_to_file(&cache_path) { - eprintln!("Failed to write to app cache: {:?}", e); + tracing::warn!(error = ?e, "Failed to write to app cache"); } } @@ -82,7 +82,7 @@ impl AppCache { pub fn refresh_background(app: AppHandle) { if let Err(e) = Self::refresh_and_get_apps(&app) { - eprintln!("Error refreshing app cache in background: {:?}", e); + tracing::warn!(error = ?e, "Error refreshing app cache in background"); } } } diff --git a/src-tauri/src/cli_substitutes.rs b/src-tauri/src/cli_substitutes.rs new file mode 100644 index 00000000..4986c521 --- /dev/null +++ b/src-tauri/src/cli_substitutes.rs @@ -0,0 +1,216 @@ +use flate2::read::GzDecoder; +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; +use std::fs; +use std::path::{Path, PathBuf}; +use tar::Archive; + +/// CLI binary substitution registry +/// Maps macOS binary names to their Linux download URLs and extraction paths + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CliSubstitute { + /// Name of the binary file to substitute + pub binary_name: String, + /// URL template for downloading Linux version (use {arch} placeholder) + pub download_url_template: String, + /// Path within the archive to the binary (if in a subdirectory) + pub binary_path_in_archive: Option, + /// Whether the download is a tar.gz archive + pub is_tar_gz: bool, +} + +/// Built-in registry of known CLI substitutes +pub fn get_builtin_registry() -> HashMap { + let mut registry = HashMap::new(); + + // Speedtest CLI by Ookla + registry.insert( + "speedtest".to_string(), + CliSubstitute { + binary_name: "speedtest".to_string(), + download_url_template: + "https://install.speedtest.net/app/cli/ookla-speedtest-1.2.0-linux-{arch}.tgz" + .to_string(), + binary_path_in_archive: Some("speedtest".to_string()), + is_tar_gz: true, + }, + ); + + registry +} + +/// Get the current architecture string for download URLs +fn get_arch_string() -> &'static str { + #[cfg(target_arch = "x86_64")] + { + "x86_64" + } + #[cfg(target_arch = "aarch64")] + { + "aarch64" + } + #[cfg(target_arch = "arm")] + { + "armhf" + } + #[cfg(not(any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "arm")))] + { + "x86_64" // fallback + } +} + +/// Download and extract a Linux CLI binary substitute +pub async fn download_substitute( + substitute: &CliSubstitute, + target_dir: &Path, +) -> Result { + let arch = get_arch_string(); + let url = substitute.download_url_template.replace("{arch}", arch); + + // Download the archive + let response = reqwest::get(&url) + .await + .map_err(|e| format!("Failed to download CLI substitute from {}: {}", url, e))?; + + if !response.status().is_success() { + return Err(format!( + "Failed to download CLI substitute: HTTP {}", + response.status() + )); + } + + let bytes = response + .bytes() + .await + .map_err(|e| format!("Failed to read response: {}", e))?; + + // Ensure target directory exists + fs::create_dir_all(target_dir) + .map_err(|e| format!("Failed to create target directory: {}", e))?; + + let target_binary_path = target_dir.join(&substitute.binary_name); + + if substitute.is_tar_gz { + // Extract from tar.gz + let cursor = std::io::Cursor::new(bytes.as_ref()); + let tar = GzDecoder::new(cursor); + let mut archive = Archive::new(tar); + + let binary_path_in_archive = substitute + .binary_path_in_archive + .as_ref() + .map(|s| s.as_str()) + .unwrap_or(&substitute.binary_name); + + let entries = archive.entries().map_err(|e| e.to_string())?; + for entry_result in entries { + let mut entry = entry_result.map_err(|e| e.to_string())?; + let entry_path = entry.path().map_err(|e| e.to_string())?; + + // Check if this is the binary we want + if entry_path.ends_with(binary_path_in_archive) { + // Extract to target location + let mut file = fs::File::create(&target_binary_path) + .map_err(|e| format!("Failed to create binary file: {}", e))?; + std::io::copy(&mut entry, &mut file) + .map_err(|e| format!("Failed to write binary: {}", e))?; + + // Make executable on Unix + #[cfg(unix)] + { + use std::os::unix::fs::PermissionsExt; + fs::set_permissions(&target_binary_path, fs::Permissions::from_mode(0o755)) + .map_err(|e| format!("Failed to set permissions: {}", e))?; + } + + return Ok(target_binary_path); + } + } + + Err(format!( + "Binary '{}' not found in archive", + binary_path_in_archive + )) + } else { + // Direct binary download + fs::write(&target_binary_path, &bytes) + .map_err(|e| format!("Failed to write binary: {}", e))?; + + #[cfg(unix)] + { + use std::os::unix::fs::PermissionsExt; + fs::set_permissions(&target_binary_path, fs::Permissions::from_mode(0o755)) + .map_err(|e| format!("Failed to set permissions: {}", e))?; + } + + Ok(target_binary_path) + } +} + +/// Check if a substitute exists for a given binary name +pub fn find_substitute(binary_name: &str) -> Option { + get_builtin_registry().get(binary_name).cloned() +} + +/// Substitute macOS binaries with Linux equivalents in an extension +pub async fn substitute_macos_binaries( + extension_dir: &Path, + macho_binaries: &[String], +) -> Result, String> { + let support_cli_dir = extension_dir.join("support").join("cli"); + let assets_dir = extension_dir.join("assets"); + + let mut substituted = Vec::new(); + + for binary_name in macho_binaries { + if let Some(substitute) = find_substitute(binary_name) { + // Download and install the Linux substitute + match download_substitute(&substitute, &support_cli_dir).await { + Ok(path) => { + // Also check if there's a binary in assets that needs replacing + let asset_binary = assets_dir.join(binary_name); + if asset_binary.exists() { + // Replace the asset binary with a symlink or copy + fs::copy(&path, &asset_binary) + .map_err(|e| format!("Failed to replace asset binary: {}", e))?; + } + + substituted.push(binary_name.clone()); + tracing::info!( + binary = %binary_name, + "Substituted macOS binary with Linux version" + ); + } + Err(e) => { + tracing::warn!(binary = %binary_name, error = %e, "Failed to substitute binary"); + } + } + } + } + + Ok(substituted) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_registry_has_speedtest() { + let registry = get_builtin_registry(); + assert!(registry.contains_key("speedtest")); + } + + #[test] + fn test_find_substitute() { + assert!(find_substitute("speedtest").is_some()); + assert!(find_substitute("nonexistent").is_none()); + } + + #[test] + fn test_arch_string() { + let arch = get_arch_string(); + assert!(!arch.is_empty()); + } +} diff --git a/src-tauri/src/clipboard_history/manager.rs b/src-tauri/src/clipboard_history/manager.rs index b195a0b5..69a13a7f 100644 --- a/src-tauri/src/clipboard_history/manager.rs +++ b/src-tauri/src/clipboard_history/manager.rs @@ -70,6 +70,20 @@ impl ClipboardHistoryManager { let store = Store::new(app_handle, "clipboard_history.sqlite")?; store.init_table(CLIPBOARD_SCHEMA)?; + // Add indices for performance + store.conn().execute( + "CREATE INDEX IF NOT EXISTS idx_clipboard_content_type ON clipboard_history(content_type)", + [], + )?; + store.conn().execute( + "CREATE INDEX IF NOT EXISTS idx_clipboard_pinned ON clipboard_history(is_pinned)", + [], + )?; + store.conn().execute( + "CREATE INDEX IF NOT EXISTS idx_clipboard_last_copied ON clipboard_history(last_copied_at)", + [], + )?; + let key = get_encryption_key()?; Ok(Self { @@ -251,7 +265,7 @@ pub static MANAGER: Lazy>> = Lazy::new(|| pub static INTERNAL_CLIPBOARD_CHANGE: AtomicBool = AtomicBool::new(false); pub fn init(app_handle: AppHandle) { - let mut manager_guard = MANAGER.lock().unwrap(); + let mut manager_guard = MANAGER.lock().expect("clipboard manager mutex poisoned"); if manager_guard.is_none() { match ClipboardHistoryManager::new(&app_handle) { Ok(manager) => { @@ -259,7 +273,7 @@ pub fn init(app_handle: AppHandle) { drop(manager_guard); start_monitoring(app_handle); } - Err(e) => eprintln!("Failed to create ClipboardHistoryManager: {:?}", e), + Err(e) => tracing::error!(error = ?e, "Failed to create ClipboardHistoryManager"), } } } diff --git a/src-tauri/src/clipboard_history/mod.rs b/src-tauri/src/clipboard_history/mod.rs index 7667e4cb..8e4fcc58 100644 --- a/src-tauri/src/clipboard_history/mod.rs +++ b/src-tauri/src/clipboard_history/mod.rs @@ -14,7 +14,11 @@ pub fn history_get_items( limit: u32, offset: u32, ) -> Result, String> { - if let Some(manager) = MANAGER.lock().unwrap().as_ref() { + if let Some(manager) = MANAGER + .lock() + .expect("clipboard manager mutex poisoned") + .as_ref() + { manager .get_items(filter, search_term, limit, offset) .map_err(|e| e.to_string()) @@ -25,7 +29,11 @@ pub fn history_get_items( #[tauri::command] pub fn history_get_item_content(id: i64) -> Result { - if let Some(manager) = MANAGER.lock().unwrap().as_ref() { + if let Some(manager) = MANAGER + .lock() + .expect("clipboard manager mutex poisoned") + .as_ref() + { manager.get_item_content(id).map_err(|e| e.to_string()) } else { Err("Clipboard history manager not initialized".to_string()) @@ -34,7 +42,11 @@ pub fn history_get_item_content(id: i64) -> Result { #[tauri::command] pub fn history_item_was_copied(id: i64) -> Result<(), String> { - if let Some(manager) = MANAGER.lock().unwrap().as_ref() { + if let Some(manager) = MANAGER + .lock() + .expect("clipboard manager mutex poisoned") + .as_ref() + { manager.item_was_copied(id).map_err(|e| e.to_string())?; Ok(()) } else { @@ -44,7 +56,11 @@ pub fn history_item_was_copied(id: i64) -> Result<(), String> { #[tauri::command] pub fn history_delete_item(id: i64) -> Result<(), String> { - if let Some(manager) = MANAGER.lock().unwrap().as_ref() { + if let Some(manager) = MANAGER + .lock() + .expect("clipboard manager mutex poisoned") + .as_ref() + { manager.delete_item(id).map_err(|e| e.to_string())?; Ok(()) } else { @@ -54,7 +70,11 @@ pub fn history_delete_item(id: i64) -> Result<(), String> { #[tauri::command] pub fn history_toggle_pin(id: i64) -> Result<(), String> { - if let Some(manager) = MANAGER.lock().unwrap().as_ref() { + if let Some(manager) = MANAGER + .lock() + .expect("clipboard manager mutex poisoned") + .as_ref() + { manager.toggle_pin(id).map_err(|e| e.to_string())?; Ok(()) } else { @@ -64,7 +84,11 @@ pub fn history_toggle_pin(id: i64) -> Result<(), String> { #[tauri::command] pub fn history_clear_all() -> Result<(), String> { - if let Some(manager) = MANAGER.lock().unwrap().as_ref() { + if let Some(manager) = MANAGER + .lock() + .expect("clipboard manager mutex poisoned") + .as_ref() + { manager.clear_all().map_err(|e| e.to_string())?; Ok(()) } else { diff --git a/src-tauri/src/clipboard_history/monitor.rs b/src-tauri/src/clipboard_history/monitor.rs index 12ad76b5..9dd0c77c 100644 --- a/src-tauri/src/clipboard_history/monitor.rs +++ b/src-tauri/src/clipboard_history/monitor.rs @@ -10,7 +10,13 @@ pub fn start_monitoring(_app_handle: AppHandle) { std::thread::spawn(move || { let mut last_text_hash = String::new(); let mut last_image_hash = String::new(); - let mut clipboard = arboard::Clipboard::new().unwrap(); + let mut clipboard = match arboard::Clipboard::new() { + Ok(c) => c, + Err(e) => { + tracing::error!(error = %e, "Failed to initialize clipboard monitor"); + return; + } + }; loop { if super::manager::INTERNAL_CLIPBOARD_CHANGE.load(std::sync::atomic::Ordering::SeqCst) { @@ -31,14 +37,16 @@ pub fn start_monitoring(_app_handle: AppHandle) { (ContentType::Text, text.to_string()) }; - if let Some(manager) = MANAGER.lock().unwrap().as_ref() { - if let Err(e) = manager.add_item( - current_hash.clone(), - content_type, - content_value, - None, - ) { - eprintln!("Error adding clipboard text item: {:?}", e); + if let Ok(guard) = MANAGER.lock() { + if let Some(manager) = guard.as_ref() { + if let Err(e) = manager.add_item( + current_hash.clone(), + content_type, + content_value, + None, + ) { + tracing::error!(error = ?e, "Error adding clipboard text item"); + } } } last_text_hash = current_hash; @@ -50,27 +58,30 @@ pub fn start_monitoring(_app_handle: AppHandle) { if let Ok(image_data) = clipboard.get_image() { let current_hash = hex::encode(Sha256::digest(&image_data.bytes)); if current_hash != last_image_hash { - if let Some(manager) = MANAGER.lock().unwrap().as_ref() { - let image_path = manager.image_dir.join(format!("{}.png", current_hash)); - match image::save_buffer( - &image_path, - &image_data.bytes, - image_data.width as u32, - image_data.height as u32, - image::ColorType::Rgba8, - ) { - Ok(_) => { - let content_value = image_path.to_string_lossy().to_string(); - if let Err(e) = manager.add_item( - current_hash.clone(), - ContentType::Image, - content_value, - None, - ) { - eprintln!("Error adding clipboard image item: {:?}", e); + if let Ok(guard) = MANAGER.lock() { + if let Some(manager) = guard.as_ref() { + let image_path = + manager.image_dir.join(format!("{}.png", current_hash)); + match image::save_buffer( + &image_path, + &image_data.bytes, + image_data.width as u32, + image_data.height as u32, + image::ColorType::Rgba8, + ) { + Ok(_) => { + let content_value = image_path.to_string_lossy().to_string(); + if let Err(e) = manager.add_item( + current_hash.clone(), + ContentType::Image, + content_value, + None, + ) { + tracing::error!(error = ?e, "Error adding clipboard image item"); + } } + Err(e) => tracing::error!(error = ?e, "Failed to save image"), } - Err(e) => eprintln!("Failed to save image: {:?}", e), } } last_image_hash = current_hash; diff --git a/src-tauri/src/dmenu.rs b/src-tauri/src/dmenu.rs new file mode 100644 index 00000000..df2ee248 --- /dev/null +++ b/src-tauri/src/dmenu.rs @@ -0,0 +1,161 @@ +use clap::{Parser, Subcommand}; +use std::io::{self, BufRead}; + +/// Flare Launcher - A Raycast-compatible launcher for Linux +#[derive(Parser)] +#[command(name = "flare")] +#[command(about = "A focused launcher for your desktop", long_about = None)] +pub struct Cli { + #[command(subcommand)] + pub command: Option, +} + +#[derive(Subcommand)] +pub enum Commands { + /// dmenu compatibility mode - read options from stdin, output selection to stdout + Dmenu { + /// Case insensitive matching + #[arg(short = 'i')] + case_insensitive: bool, + + /// Prompt string to display + #[arg(short = 'p', default_value = "")] + prompt: String, + + /// Number of lines to display (ignored, for compatibility) + #[arg(short = 'l')] + lines: Option, + + /// Monitor to display on (ignored, for compatibility) + #[arg(short = 'm')] + monitor: Option, + + /// Font (ignored, for compatibility) + #[arg(short = 'f', long = "fn")] + font: Option, + }, +} + +/// Holds the state for a dmenu session +#[derive(Debug, Clone)] +pub struct DmenuSession { + pub items: Vec, + pub case_insensitive: bool, + pub prompt: String, +} + +impl DmenuSession { + /// Create a new DmenuSession by reading items from stdin + pub fn from_stdin(case_insensitive: bool, prompt: String) -> io::Result { + let stdin = io::stdin(); + let items: Vec = stdin + .lock() + .lines() + .collect::, _>>()? + .into_iter() + .filter(|s| !s.is_empty()) + .collect(); + + Ok(Self { + items, + case_insensitive, + prompt, + }) + } + + /// Output the selected item to stdout + pub fn output_selection(&self, selection: &str) { + println!("{}", selection); + } + + /// Filter items based on search query + pub fn filter_items(&self, query: &str) -> Vec { + if query.is_empty() { + return self.items.clone(); + } + + let query_lower = query.to_lowercase(); + self.items + .iter() + .filter(|item| { + if self.case_insensitive { + item.to_lowercase().contains(&query_lower) + } else { + item.contains(query) + } + }) + .cloned() + .collect() + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_dmenu_session_empty() { + let session = DmenuSession { + items: vec![], + case_insensitive: false, + prompt: String::new(), + }; + assert!(session.items.is_empty()); + } + + #[test] + fn test_dmenu_session_with_items() { + let session = DmenuSession { + items: vec!["Option 1".into(), "Option 2".into()], + case_insensitive: true, + prompt: "Select:".into(), + }; + assert_eq!(session.items.len(), 2); + assert!(session.case_insensitive); + assert_eq!(session.prompt, "Select:"); + } + + #[test] + fn test_filter_case_sensitive() { + let session = DmenuSession { + items: vec!["Firefox".into(), "CHROME".into(), "vivaldi".into()], + case_insensitive: false, + prompt: String::new(), + }; + let filtered = session.filter_items("Fire"); + assert_eq!(filtered, vec!["Firefox"]); + } + + #[test] + fn test_filter_case_insensitive() { + let session = DmenuSession { + items: vec!["Firefox".into(), "CHROME".into(), "vivaldi".into()], + case_insensitive: true, + prompt: String::new(), + }; + let filtered = session.filter_items("chrome"); + assert_eq!(filtered, vec!["CHROME"]); + } + + #[test] + fn test_filter_empty_query() { + let session = DmenuSession { + items: vec!["A".into(), "B".into(), "C".into()], + case_insensitive: false, + prompt: String::new(), + }; + let filtered = session.filter_items(""); + assert_eq!(filtered.len(), 3); + } + + #[test] + fn test_filter_no_matches() { + let session = DmenuSession { + items: vec!["Firefox".into(), "Chrome".into()], + case_insensitive: false, + prompt: String::new(), + }; + let filtered = session.filter_items("Safari"); + assert!(filtered.is_empty()); + } +} diff --git a/src-tauri/src/downloads/manager.rs b/src-tauri/src/downloads/manager.rs new file mode 100644 index 00000000..60d5d8fd --- /dev/null +++ b/src-tauri/src/downloads/manager.rs @@ -0,0 +1,243 @@ +use std::fs; +use std::path::Path; +use std::sync::{Arc, Mutex}; + +use chrono::{DateTime, Utc}; +use rusqlite::{params, Connection, Result as RusqliteResult}; +use tauri::{AppHandle, Manager}; + +use super::types::{is_incomplete_download, DownloadItem}; +use crate::error::AppError; + +pub struct DownloadsManager { + db: Arc>, +} + +impl DownloadsManager { + pub fn new(app_handle: &AppHandle) -> Result { + let data_dir = app_handle + .path() + .app_local_data_dir() + .map_err(|_| AppError::DirectoryNotFound)?; + + if !data_dir.exists() { + fs::create_dir_all(&data_dir).map_err(|e| AppError::FileSearch(e.to_string()))?; + } + + let db_path = data_dir.join("downloads.sqlite"); + let db = Connection::open(db_path)?; + + Ok(Self { + db: Arc::new(Mutex::new(db)), + }) + } + + pub fn init_db(&self) -> RusqliteResult<()> { + let db = self.db.lock().expect("downloads db mutex poisoned"); + + db.execute( + "CREATE TABLE IF NOT EXISTS downloads ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + path TEXT UNIQUE NOT NULL, + name TEXT NOT NULL, + extension TEXT, + file_type TEXT NOT NULL, + size_bytes INTEGER NOT NULL, + created_at TEXT NOT NULL, + accessed_at TEXT + )", + [], + )?; + + db.execute( + "CREATE INDEX IF NOT EXISTS idx_downloads_created ON downloads(created_at DESC)", + [], + )?; + + db.execute( + "CREATE INDEX IF NOT EXISTS idx_downloads_name ON downloads(name)", + [], + )?; + + Ok(()) + } + + pub fn add_download(&self, path: &Path) -> Result, AppError> { + let metadata = match fs::metadata(path) { + Ok(m) => m, + Err(_) => return Ok(None), // File doesn't exist or can't access + }; + + let name = path + .file_name() + .and_then(|n| n.to_str()) + .unwrap_or("unknown") + .to_string(); + + let extension = path + .extension() + .and_then(|e| e.to_str()) + .map(|s| s.to_lowercase()); + + // Skip incomplete downloads + if is_incomplete_download(extension.as_deref()) { + return Ok(None); + } + + let file_type = if metadata.is_dir() { + "directory" + } else { + "file" + } + .to_string(); + + let size_bytes = metadata.len() as i64; + + let created_at = metadata + .created() + .or_else(|_| metadata.modified()) + .map(|t| DateTime::::from(t).to_rfc3339()) + .unwrap_or_else(|_| Utc::now().to_rfc3339()); + + let path_str = path.to_string_lossy().to_string(); + + let db = self.db.lock().expect("downloads db mutex poisoned"); + db.execute( + "INSERT OR REPLACE INTO downloads (path, name, extension, file_type, size_bytes, created_at) + VALUES (?1, ?2, ?3, ?4, ?5, ?6)", + params![path_str, name, extension, file_type, size_bytes, created_at], + )?; + + let id = db.last_insert_rowid(); + + Ok(Some(DownloadItem { + id, + path: path_str, + name, + file_type, + extension, + size_bytes, + created_at, + accessed_at: None, + is_complete: true, + })) + } + + pub fn get_items( + &self, + filter: &str, + search_term: Option<&str>, + limit: u32, + offset: u32, + ) -> Result, AppError> { + let db = self.db.lock().expect("downloads db mutex poisoned"); + + let extension_filter = match filter { + "images" => Some(vec![ + "jpg", "jpeg", "png", "gif", "webp", "svg", "bmp", "ico", + ]), + "videos" => Some(vec!["mp4", "mov", "avi", "mkv", "webm", "flv", "wmv"]), + "audio" => Some(vec!["mp3", "wav", "flac", "m4a", "ogg", "aac"]), + "documents" => Some(vec!["pdf", "doc", "docx", "txt", "md", "rtf", "odt"]), + "archives" => Some(vec!["zip", "tar", "gz", "7z", "rar", "bz2", "xz"]), + _ => None, + }; + + let mut sql = String::from( + "SELECT id, path, name, extension, file_type, size_bytes, created_at, accessed_at + FROM downloads WHERE 1=1", + ); + + let mut params_vec: Vec> = Vec::new(); + + if let Some(term) = search_term { + if !term.is_empty() { + sql.push_str(" AND name LIKE ?"); + params_vec.push(Box::new(format!("%{}%", term))); + } + } + + if let Some(exts) = &extension_filter { + let placeholders: Vec = exts.iter().map(|_| "?".to_string()).collect(); + sql.push_str(&format!(" AND extension IN ({})", placeholders.join(", "))); + for ext in exts { + params_vec.push(Box::new(ext.to_string())); + } + } + + sql.push_str(" ORDER BY created_at DESC LIMIT ? OFFSET ?"); + params_vec.push(Box::new(limit)); + params_vec.push(Box::new(offset)); + + let params_refs: Vec<&dyn rusqlite::ToSql> = + params_vec.iter().map(|p| p.as_ref()).collect(); + + let mut stmt = db.prepare(&sql)?; + let items_iter = stmt.query_map(params_refs.as_slice(), |row| { + Ok(DownloadItem { + id: row.get(0)?, + path: row.get(1)?, + name: row.get(2)?, + extension: row.get(3)?, + file_type: row.get(4)?, + size_bytes: row.get(5)?, + created_at: row.get(6)?, + accessed_at: row.get(7)?, + is_complete: true, + }) + })?; + + items_iter + .collect::>>() + .map_err(|e| e.into()) + } + + pub fn mark_accessed(&self, id: i64) -> Result<(), AppError> { + let db = self.db.lock().expect("downloads db mutex poisoned"); + let now = Utc::now().to_rfc3339(); + db.execute( + "UPDATE downloads SET accessed_at = ?1 WHERE id = ?2", + params![now, id], + )?; + Ok(()) + } + + pub fn delete_item(&self, id: i64) -> Result<(), AppError> { + let db = self.db.lock().expect("downloads db mutex poisoned"); + db.execute("DELETE FROM downloads WHERE id = ?1", params![id])?; + Ok(()) + } + + pub fn clear_all(&self) -> Result<(), AppError> { + let db = self.db.lock().expect("downloads db mutex poisoned"); + db.execute("DELETE FROM downloads", [])?; + Ok(()) + } + + /// Scan existing files in a directory and add them to the database + pub fn scan_directory(&self, dir: &Path) -> Result { + let entries = match fs::read_dir(dir) { + Ok(e) => e, + Err(_) => return Ok(0), + }; + + let mut count = 0; + for entry in entries.flatten() { + let path = entry.path(); + if self.add_download(&path)?.is_some() { + count += 1; + } + } + + Ok(count) + } + + /// Get the downloads directory path + pub fn get_downloads_dir() -> Option { + dirs::download_dir() + } +} + +// Global manager instance +use once_cell::sync::Lazy; +pub static MANAGER: Lazy>> = Lazy::new(|| Mutex::new(None)); diff --git a/src-tauri/src/downloads/mod.rs b/src-tauri/src/downloads/mod.rs new file mode 100644 index 00000000..a5632854 --- /dev/null +++ b/src-tauri/src/downloads/mod.rs @@ -0,0 +1,270 @@ +pub mod manager; +pub mod types; +pub mod watcher; + +use manager::{DownloadsManager, MANAGER}; +use std::fs; +use std::path::Path; +use tauri::AppHandle; +use types::DownloadItem; + +/// Initialize the downloads module +pub fn init(app_handle: AppHandle) { + // Create the manager + let downloads_manager = match DownloadsManager::new(&app_handle) { + Ok(m) => m, + Err(e) => { + tracing::error!(error = ?e, "Failed to create DownloadsManager"); + return; + } + }; + + // Initialize the database + if let Err(e) = downloads_manager.init_db() { + tracing::error!(error = ?e, "Failed to initialize downloads database"); + return; + } + + // Scan existing downloads on first run + if let Some(downloads_dir) = DownloadsManager::get_downloads_dir() { + match downloads_manager.scan_directory(&downloads_dir) { + Ok(count) => { + if count > 0 { + tracing::info!(count, "Indexed existing downloads"); + } + } + Err(e) => { + tracing::warn!(error = ?e, "Failed to scan existing downloads"); + } + } + } + + // Store the manager globally + *MANAGER.lock().expect("downloads manager mutex poisoned") = Some(downloads_manager); + + // Start the file watcher + let watcher_handle = app_handle.clone(); + std::thread::spawn(move || { + if let Err(e) = watcher::start_watching(watcher_handle) { + tracing::error!(error = %e, "Failed to start downloads watcher"); + } + }); + + tracing::info!("Downloads manager initialized"); +} + +// Tauri Commands + +#[tauri::command] +pub fn downloads_get_items( + filter: String, + search_term: Option, + limit: u32, + offset: u32, +) -> Result, String> { + if let Some(manager) = MANAGER + .lock() + .expect("downloads manager mutex poisoned") + .as_ref() + { + manager + .get_items(&filter, search_term.as_deref(), limit, offset) + .map_err(|e| e.to_string()) + } else { + Err("Downloads manager not initialized".to_string()) + } +} + +#[tauri::command] +pub fn downloads_open_file(path: String) -> Result<(), String> { + let path = Path::new(&path); + + if !path.exists() { + return Err("File not found".to_string()); + } + + #[cfg(target_os = "linux")] + { + std::process::Command::new("xdg-open") + .arg(path) + .spawn() + .map_err(|e| format!("Failed to open file: {}", e))?; + } + + #[cfg(target_os = "macos")] + { + std::process::Command::new("open") + .arg(path) + .spawn() + .map_err(|e| format!("Failed to open file: {}", e))?; + } + + #[cfg(target_os = "windows")] + { + std::process::Command::new("explorer") + .arg(path) + .spawn() + .map_err(|e| format!("Failed to open file: {}", e))?; + } + + // Mark as accessed + if let Some(manager) = MANAGER + .lock() + .expect("downloads manager mutex poisoned") + .as_ref() + { + // Find the item by path and mark it accessed + if let Ok(items) = manager.get_items("all", None, 1000, 0) { + if let Some(item) = items.iter().find(|i| i.path == path.to_string_lossy()) { + let _ = manager.mark_accessed(item.id); + } + } + } + + Ok(()) +} + +#[tauri::command] +pub fn downloads_show_in_folder(path: String) -> Result<(), String> { + let path = Path::new(&path); + + if !path.exists() { + return Err("File not found".to_string()); + } + + let parent = path.parent().unwrap_or(path); + + #[cfg(target_os = "linux")] + { + // Try to use the file manager to highlight the file + // First try with dbus/nautilus, fall back to xdg-open on parent + let result = std::process::Command::new("dbus-send") + .args([ + "--session", + "--dest=org.freedesktop.FileManager1", + "--type=method_call", + "/org/freedesktop/FileManager1", + "org.freedesktop.FileManager1.ShowItems", + &format!("array:string:file://{}", path.to_string_lossy()), + "string:", + ]) + .output(); + + if result.is_err() || !result.unwrap().status.success() { + // Fall back to just opening the folder + std::process::Command::new("xdg-open") + .arg(parent) + .spawn() + .map_err(|e| format!("Failed to open folder: {}", e))?; + } + } + + #[cfg(target_os = "macos")] + { + std::process::Command::new("open") + .args(["-R", &path.to_string_lossy()]) + .spawn() + .map_err(|e| format!("Failed to show in Finder: {}", e))?; + } + + #[cfg(target_os = "windows")] + { + std::process::Command::new("explorer") + .args(["/select,", &path.to_string_lossy()]) + .spawn() + .map_err(|e| format!("Failed to show in Explorer: {}", e))?; + } + + Ok(()) +} + +#[tauri::command] +pub fn downloads_delete_item(id: i64) -> Result<(), String> { + if let Some(manager) = MANAGER + .lock() + .expect("downloads manager mutex poisoned") + .as_ref() + { + manager.delete_item(id).map_err(|e| e.to_string()) + } else { + Err("Downloads manager not initialized".to_string()) + } +} + +#[tauri::command] +pub fn downloads_delete_file(id: i64, path: String) -> Result<(), String> { + let path = Path::new(&path); + + if path.exists() { + if path.is_dir() { + fs::remove_dir_all(path).map_err(|e| format!("Failed to delete directory: {}", e))?; + } else { + fs::remove_file(path).map_err(|e| format!("Failed to delete file: {}", e))?; + } + } + + // Also remove from history + if let Some(manager) = MANAGER + .lock() + .expect("downloads manager mutex poisoned") + .as_ref() + { + manager.delete_item(id).map_err(|e| e.to_string())?; + } + + Ok(()) +} + +#[tauri::command] +pub fn downloads_clear_history() -> Result<(), String> { + if let Some(manager) = MANAGER + .lock() + .expect("downloads manager mutex poisoned") + .as_ref() + { + manager.clear_all().map_err(|e| e.to_string()) + } else { + Err("Downloads manager not initialized".to_string()) + } +} + +/// Get the most recent download +#[tauri::command] +pub fn downloads_get_latest() -> Result, String> { + if let Some(manager) = MANAGER + .lock() + .expect("downloads manager mutex poisoned") + .as_ref() + { + // Get the first item sorted by created_at descending + manager + .get_items("all", None, 1, 0) + .map(|items| items.into_iter().next()) + .map_err(|e| e.to_string()) + } else { + Err("Downloads manager not initialized".to_string()) + } +} + +/// Copy the latest download path to clipboard +#[tauri::command] +pub fn downloads_copy_latest() -> Result { + if let Some(manager) = MANAGER + .lock() + .expect("downloads manager mutex poisoned") + .as_ref() + { + match manager.get_items("all", None, 1, 0) { + Ok(items) => { + if let Some(item) = items.first() { + Ok(item.path.clone()) + } else { + Err("No downloads found".to_string()) + } + } + Err(e) => Err(e.to_string()), + } + } else { + Err("Downloads manager not initialized".to_string()) + } +} diff --git a/src-tauri/src/downloads/types.rs b/src-tauri/src/downloads/types.rs new file mode 100644 index 00000000..dbf1928a --- /dev/null +++ b/src-tauri/src/downloads/types.rs @@ -0,0 +1,31 @@ +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Serialize, Deserialize, Clone)] +#[serde(rename_all = "camelCase")] +pub struct DownloadItem { + pub id: i64, + pub path: String, + pub name: String, + pub file_type: String, // "file" or "directory" + pub extension: Option, + pub size_bytes: i64, + pub created_at: String, // ISO 8601 timestamp + pub accessed_at: Option, + pub is_complete: bool, // false if still downloading (.crdownload, .part) +} + +/// File extensions that indicate an incomplete download +pub const INCOMPLETE_EXTENSIONS: &[&str] = &[ + "crdownload", // Chrome + "part", // Firefox, wget + "download", // Safari + "tmp", // Various + "partial", // Various +]; + +/// Check if a file extension indicates an incomplete download +pub fn is_incomplete_download(extension: Option<&str>) -> bool { + extension.map_or(false, |ext| { + INCOMPLETE_EXTENSIONS.contains(&ext.to_lowercase().as_str()) + }) +} diff --git a/src-tauri/src/downloads/watcher.rs b/src-tauri/src/downloads/watcher.rs new file mode 100644 index 00000000..95a3eb3c --- /dev/null +++ b/src-tauri/src/downloads/watcher.rs @@ -0,0 +1,103 @@ +use notify::{Config, Event, EventKind, RecommendedWatcher, RecursiveMode, Watcher}; +use std::path::PathBuf; +use std::sync::mpsc; +use std::time::Duration; +use tauri::AppHandle; + +use super::manager::MANAGER; + +/// Start watching the Downloads directory for new files +pub fn start_watching(_app_handle: AppHandle) -> Result<(), String> { + let downloads_dir = match dirs::download_dir() { + Some(dir) => dir, + None => { + tracing::warn!("Could not determine downloads directory"); + return Err("Could not determine downloads directory".to_string()); + } + }; + + if !downloads_dir.exists() { + tracing::warn!(path = %downloads_dir.display(), "Downloads directory does not exist"); + return Err("Downloads directory does not exist".to_string()); + } + + // Create a channel to receive events + let (tx, rx) = mpsc::channel(); + + // Create the watcher with a debounce of 500ms + let mut watcher: RecommendedWatcher = Watcher::new( + move |res: Result| { + if let Ok(event) = res { + let _ = tx.send(event); + } + }, + Config::default().with_poll_interval(Duration::from_secs(2)), + ) + .map_err(|e| format!("Failed to create watcher: {}", e))?; + + // Watch the downloads directory + watcher + .watch(&downloads_dir, RecursiveMode::NonRecursive) + .map_err(|e| format!("Failed to watch downloads directory: {}", e))?; + + tracing::info!(path = %downloads_dir.display(), "Watching downloads directory"); + + // Spawn a thread to handle events + std::thread::spawn(move || { + // Keep watcher alive + let _watcher = watcher; + + for event in rx { + handle_event(event); + } + }); + + Ok(()) +} + +fn handle_event(event: Event) { + // Only handle file creation and rename events + match event.kind { + EventKind::Create(_) | EventKind::Modify(notify::event::ModifyKind::Name(_)) => {} + _ => return, + } + + for path in event.paths { + // Skip if not a file + if !path.is_file() { + continue; + } + + // Skip hidden files + if let Some(name) = path.file_name().and_then(|n| n.to_str()) { + if name.starts_with('.') { + continue; + } + } + + tracing::debug!(path = %path.display(), "New download detected"); + + // Add to manager - use Ok pattern to handle poisoned mutex gracefully + if let Ok(guard) = MANAGER.lock() { + if let Some(manager) = guard.as_ref() { + match manager.add_download(&path) { + Ok(Some(item)) => { + tracing::info!(name = %item.name, "Added download to history"); + } + Ok(None) => { + // Skipped (incomplete download or error reading file) + } + Err(e) => { + tracing::error!(error = %e, path = %path.display(), "Failed to add download"); + } + } + } + } + } +} + +/// Get the downloads directory path +#[allow(dead_code)] +pub fn get_downloads_path() -> Option { + dirs::download_dir() +} diff --git a/src-tauri/src/extension_shims.rs b/src-tauri/src/extension_shims.rs new file mode 100644 index 00000000..facb71a2 --- /dev/null +++ b/src-tauri/src/extension_shims.rs @@ -0,0 +1,1106 @@ +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; +use std::path::PathBuf; +use std::process::Command; + +/// Provides Linux equivalents for macOS-specific APIs used in Raycast extensions +/// This module helps bridge the gap between macOS and Linux for extension compatibility + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ShimResult { + pub success: bool, + pub output: Option, + pub error: Option, +} + +/// Enhanced AppleScript command types for better parsing and execution +#[derive(Debug, Clone, PartialEq)] +#[allow(dead_code)] +pub enum AppleScriptCommand { + DoShellScript { + command: String, + needs_sudo: bool, + }, + TellApplication { + app: String, + command: AppCommand, + }, + DisplayNotification { + message: String, + title: String, + }, + SetVolume { + volume: i32, + }, + OpenLocation { + location: String, + }, + Keystroke { + text: String, + modifiers: Vec, + }, + KeyCode { + code: i32, + modifiers: Vec, + }, + Click { + x: Option, + y: Option, + }, + ReadFile { + path: String, + }, + WriteFile { + path: String, + content: String, + }, + SetClipboard { + text: String, + }, + GetClipboard, +} + +#[derive(Debug, Clone, PartialEq)] +#[allow(dead_code)] +pub enum AppCommand { + Activate, + Quit, + Open { path: String }, + GetURL, // Browser: get current tab URL + ExecuteJavaScript { code: String }, // Browser: execute JS + Reveal { path: String }, // Finder: reveal file + GetSelection, // Finder: get selected files +} + +#[derive(Debug, Clone, PartialEq)] +pub enum Modifier { + Command, // Super/Meta key on Linux + Control, + Option, // Alt key on Linux + Shift, +} + +#[derive(Debug, Clone)] +pub enum DisplayServer { + X11, + Wayland, + Unknown, +} + +/// Translates macOS paths to Linux equivalents +pub struct PathShim; + +impl PathShim { + /// Maps common macOS paths to their Linux equivalents + pub fn translate_path(macos_path: &str) -> String { + // Handle /Applications/ paths + if macos_path.starts_with("/Applications/") { + // Try to find the application in common Linux locations + let app_name = macos_path + .strip_prefix("/Applications/") + .unwrap_or(macos_path) + .trim_end_matches(".app") + .to_lowercase(); + + // Return the most likely Linux equivalent + // Extensions will need to use the desktop file system instead + return format!("/usr/share/applications/{}.desktop", app_name); + } + + // Handle /Library/ paths + if macos_path.starts_with("/Library/") { + let rest = macos_path.strip_prefix("/Library/").unwrap_or(""); + return format!("/usr/lib/{}", rest); + } + + // Handle ~/Library/ paths + if macos_path.starts_with("~/Library/") { + let rest = macos_path.strip_prefix("~/Library/").unwrap_or(""); + // Map to XDG directories where appropriate + if rest.starts_with("Application Support/") { + let app_rest = rest.strip_prefix("Application Support/").unwrap_or(""); + return format!("~/.local/share/{}", app_rest); + } + if rest.starts_with("Preferences/") { + let pref_rest = rest.strip_prefix("Preferences/").unwrap_or(""); + return format!("~/.config/{}", pref_rest); + } + return format!("~/.local/lib/{}", rest); + } + + // Handle /Users/ paths + if macos_path.starts_with("/Users/") { + return macos_path.replace("/Users/", "/home/"); + } + + // Return unchanged if no translation needed + macos_path.to_string() + } + + /// Expands ~ in paths to the actual home directory + pub fn expand_home(path: &str) -> PathBuf { + if path.starts_with("~/") { + if let Some(home) = dirs::home_dir() { + return home.join(path.strip_prefix("~/").unwrap_or(path)); + } + } + PathBuf::from(path) + } +} + +/// Provides shims for AppleScript functionality +pub struct AppleScriptShim; + +impl AppleScriptShim { + /// Attempts to translate and execute common AppleScript commands + pub fn run_apple_script(script: &str) -> ShimResult { + // Parse common AppleScript patterns and translate to Linux equivalents + + // Pattern: do shell script + if let Some((command, needs_sudo)) = Self::extract_shell_script(script) { + return Self::run_shell_script(&command, needs_sudo); + } + + // Pattern: open location + if let Some(location) = Self::extract_open_location(script) { + return Self::open_location(&location); + } + + // Pattern: tell application "AppName" to activate + if let Some(app_name) = Self::extract_activate_app(script) { + return Self::activate_application(&app_name); + } + + // Pattern: tell application "AppName" to quit + if let Some(app_name) = Self::extract_quit_app(script) { + return Self::quit_application(&app_name); + } + + // Pattern: display notification + if let Some((title, message)) = Self::extract_notification(script) { + return Self::show_notification(&title, &message); + } + + // Pattern: set volume + if let Some(volume) = Self::extract_set_volume(script) { + return Self::set_system_volume(volume); + } + + // Pattern: set clipboard + if let Some(text) = Self::extract_set_clipboard(script) { + return Self::set_clipboard(&text); + } + + // Pattern: get clipboard + if Self::is_get_clipboard(script) { + return Self::get_clipboard(); + } + + // Pattern: keystroke + if let Some((text, modifiers)) = Self::extract_keystroke(script) { + return Self::simulate_keystroke(&text, &modifiers); + } + + // Pattern: key code + if let Some((code, modifiers)) = Self::extract_keycode(script) { + return Self::simulate_keycode(code, &modifiers); + } + // If we can't translate, return an error + ShimResult { + success: false, + output: None, + error: Some(format!( + "AppleScript pattern not supported on Linux. Script: {}", + script + )), + } + } + + // ========== NEW PRIORITY 1 PARSERS ========== + + fn extract_shell_script(script: &str) -> Option<(String, bool)> { + // Match: do shell script "command" + // Also match: do shell script "command" with administrator privileges + let pattern = r#"do shell script "([^"]+)"(?:\s+with administrator privileges)?"#; + if let Some(caps) = regex::Regex::new(pattern).ok()?.captures(script) { + let command = caps.get(1)?.as_str().to_string(); + let needs_sudo = script.contains("with administrator privileges"); + return Some((command, needs_sudo)); + } + None + } + + fn extract_open_location(script: &str) -> Option { + // Match various open patterns + let patterns = [ + r#"open location "([^"]+)""#, + r#"open "([^"]+)""#, + r#"tell application "Finder" to open "([^"]+)""#, + ]; + + for pattern in &patterns { + if let Some(caps) = regex::Regex::new(pattern).ok()?.captures(script) { + return caps.get(1).map(|m| m.as_str().to_string()); + } + } + None + } + + fn extract_set_clipboard(script: &str) -> Option { + // Match: set the clipboard to "text" + let pattern = r#"set the clipboard to "([^"]+)""#; + regex::Regex::new(pattern) + .ok()? + .captures(script)? + .get(1) + .map(|m| m.as_str().to_string()) + } + + fn is_get_clipboard(script: &str) -> bool { + // Match "get the clipboard" but not "set the clipboard" + script.contains("get the clipboard") + || (script.contains("the clipboard") && !script.contains("set the clipboard")) + } + + fn extract_keystroke(script: &str) -> Option<(String, Vec)> { + // Match: tell application "System Events" to keystroke "text" + // Also match: tell application "System Events" to keystroke "text" using {command down, shift down} + let pattern = r#"keystroke "([^"]+)"(?:\s+using\s+\{([^}]+)\})?"#; + + if let Some(caps) = regex::Regex::new(pattern).ok()?.captures(script) { + let text = caps.get(1)?.as_str().to_string(); + let modifiers = if let Some(mods_str) = caps.get(2) { + Self::parse_modifiers(mods_str.as_str()) + } else { + Vec::new() + }; + return Some((text, modifiers)); + } + None + } + + fn extract_keycode(script: &str) -> Option<(i32, Vec)> { + // Match: tell application "System Events" to key code 36 + // Also match with modifiers: key code 36 using {command down} + let pattern = r"key code (\d+)(?:\s+using\s+\{([^}]+)\})?"; + + if let Some(caps) = regex::Regex::new(pattern).ok()?.captures(script) { + let code = caps.get(1)?.as_str().parse().ok()?; + let modifiers = if let Some(mods_str) = caps.get(2) { + Self::parse_modifiers(mods_str.as_str()) + } else { + Vec::new() + }; + return Some((code, modifiers)); + } + None + } + + fn parse_modifiers(mods_str: &str) -> Vec { + let mut modifiers = Vec::new(); + + if mods_str.contains("command down") || mods_str.contains("command_down") { + modifiers.push(Modifier::Command); + } + if mods_str.contains("control down") || mods_str.contains("control_down") { + modifiers.push(Modifier::Control); + } + if mods_str.contains("option down") + || mods_str.contains("option_down") + || mods_str.contains("alt down") + { + modifiers.push(Modifier::Option); + } + if mods_str.contains("shift down") || mods_str.contains("shift_down") { + modifiers.push(Modifier::Shift); + } + + modifiers + } + + // ========== NEW PRIORITY 1 EXECUTORS ========== + + fn run_shell_script(command: &str, needs_sudo: bool) -> ShimResult { + let mut cmd = if needs_sudo { + let mut c = Command::new("pkexec"); + c.arg("sh").arg("-c").arg(command); + c + } else { + let mut c = Command::new("sh"); + c.arg("-c").arg(command); + c + }; + + match cmd.output() { + Ok(output) => { + let stdout = String::from_utf8_lossy(&output.stdout).to_string(); + let stderr = String::from_utf8_lossy(&output.stderr).to_string(); + + ShimResult { + success: output.status.success(), + output: if !stdout.is_empty() { + Some(stdout) + } else { + None + }, + error: if !stderr.is_empty() { + Some(stderr) + } else { + None + }, + } + } + Err(e) => ShimResult { + success: false, + output: None, + error: Some(format!("Failed to execute shell script: {}", e)), + }, + } + } + + fn open_location(location: &str) -> ShimResult { + // Handle both URLs and file paths + let location_expanded = if location.starts_with("file://") { + PathShim::expand_home(&location[7..]) + .to_string_lossy() + .to_string() + } else if !location.starts_with("http://") && !location.starts_with("https://") { + PathShim::expand_home(location) + .to_string_lossy() + .to_string() + } else { + location.to_string() + }; + + match Command::new("xdg-open").arg(&location_expanded).output() { + Ok(output) if output.status.success() => ShimResult { + success: true, + output: Some(format!("Opened: {}", location)), + error: None, + }, + Ok(output) => ShimResult { + success: false, + output: None, + error: Some(String::from_utf8_lossy(&output.stderr).to_string()), + }, + Err(e) => ShimResult { + success: false, + output: None, + error: Some(format!("Failed to open location: {}", e)), + }, + } + } + + fn set_clipboard(text: &str) -> ShimResult { + // Use wl-copy for Wayland or xclip for X11 + let display_server = Self::detect_display_server(); + + let result = match display_server { + DisplayServer::Wayland => Command::new("wl-copy").arg(text).output(), + DisplayServer::X11 | DisplayServer::Unknown => { + // Try xclip first + let xclip_result = Command::new("xclip") + .arg("-selection") + .arg("clipboard") + .arg("-i") + .stdin(std::process::Stdio::piped()) + .spawn() + .and_then(|mut child| { + use std::io::Write; + if let Some(mut stdin) = child.stdin.take() { + stdin.write_all(text.as_bytes())?; + } + child.wait_with_output() + }); + + if xclip_result.is_ok() { + xclip_result + } else { + // Fallback to xsel + Command::new("xsel") + .arg("--clipboard") + .arg("--input") + .arg(text) + .output() + } + } + }; + + match result { + Ok(output) if output.status.success() => ShimResult { + success: true, + output: Some("Clipboard updated".to_string()), + error: None, + }, + _ => ShimResult { + success: false, + output: None, + error: Some( + "Failed to set clipboard. Install wl-copy (Wayland) or xclip/xsel (X11)" + .to_string(), + ), + }, + } + } + + fn get_clipboard() -> ShimResult { + let display_server = Self::detect_display_server(); + + let result = match display_server { + DisplayServer::Wayland => Command::new("wl-paste").output(), + DisplayServer::X11 | DisplayServer::Unknown => { + // Try xclip first + let xclip_result = Command::new("xclip") + .arg("-selection") + .arg("clipboard") + .arg("-o") + .output(); + + if xclip_result.is_ok() { + xclip_result + } else { + // Fallback to xsel + Command::new("xsel") + .arg("--clipboard") + .arg("--output") + .output() + } + } + }; + + match result { + Ok(output) if output.status.success() => ShimResult { + success: true, + output: Some(String::from_utf8_lossy(&output.stdout).to_string()), + error: None, + }, + _ => ShimResult { + success: false, + output: None, + error: Some( + "Failed to get clipboard. Install wl-paste (Wayland) or xclip/xsel (X11)" + .to_string(), + ), + }, + } + } + + fn detect_display_server() -> DisplayServer { + // Check if we're running on Wayland or X11 + if std::env::var("WAYLAND_DISPLAY").is_ok() { + DisplayServer::Wayland + } else if std::env::var("DISPLAY").is_ok() { + DisplayServer::X11 + } else { + DisplayServer::Unknown + } + } + + // ========== NEW PRIORITY 2 EXECUTORS (GUI AUTOMATION) ========== + + fn simulate_keystroke(text: &str, modifiers: &[Modifier]) -> ShimResult { + let display_server = Self::detect_display_server(); + + match display_server { + DisplayServer::Wayland => Self::simulate_keystroke_wayland(text, modifiers), + DisplayServer::X11 => Self::simulate_keystroke_x11(text, modifiers), + DisplayServer::Unknown => ShimResult { + success: false, + output: None, + error: Some("Cannot detect display server (X11/Wayland)".to_string()), + }, + } + } + + fn simulate_keystroke_x11(text: &str, modifiers: &[Modifier]) -> ShimResult { + // Build xdotool command + let mut cmd = Command::new("xdotool"); + + if modifiers.is_empty() { + // Simple text typing + cmd.arg("type").arg("--").arg(text); + } else { + // Key combination + let modifier_keys = Self::modifiers_to_x11_keys(modifiers); + let key_combo = if text.len() == 1 { + format!("{}+{}", modifier_keys, text) + } else { + // If text is multi-char, treat as key name + format!("{}+{}", modifier_keys, text) + }; + cmd.arg("key").arg("--").arg(key_combo); + } + + match cmd.output() { + Ok(output) if output.status.success() => ShimResult { + success: true, + output: Some("Keystroke simulated".to_string()), + error: None, + }, + Ok(output) => ShimResult { + success: false, + output: None, + error: Some(String::from_utf8_lossy(&output.stderr).to_string()), + }, + Err(_) => ShimResult { + success: false, + output: None, + error: Some( + "Failed to execute xdotool. Install with: sudo apt install xdotool".to_string(), + ), + }, + } + } + + fn simulate_keystroke_wayland(text: &str, modifiers: &[Modifier]) -> ShimResult { + // Build ydotool command + let mut cmd = Command::new("ydotool"); + + if modifiers.is_empty() { + // Simple text typing + cmd.arg("type").arg(text); + } else { + // Key combination - ydotool uses different approach + let modifier_keys = Self::modifiers_to_wayland_keys(modifiers); + cmd.arg("key").arg(format!("{}:{}", modifier_keys, text)); + } + + match cmd.output() { + Ok(output) if output.status.success() => ShimResult { + success: true, + output: Some("Keystroke simulated".to_string()), + error: None, + }, + Ok(output) => ShimResult { + success: false, + output: None, + error: Some(String::from_utf8_lossy(&output.stderr).to_string()), + }, + Err(_) => ShimResult { + success: false, + output: None, + error: Some( + "Failed to execute ydotool. Install with: sudo apt install ydotool".to_string(), + ), + }, + } + } + + fn simulate_keycode(code: i32, modifiers: &[Modifier]) -> ShimResult { + let display_server = Self::detect_display_server(); + + // Map macOS key codes to Linux equivalents + let linux_key = Self::macos_keycode_to_linux(code); + + match display_server { + DisplayServer::X11 => { + let modifier_keys = Self::modifiers_to_x11_keys(modifiers); + let key_combo = if modifier_keys.is_empty() { + linux_key.to_string() + } else { + format!("{}+{}", modifier_keys, linux_key) + }; + + match Command::new("xdotool") + .arg("key") + .arg("--") + .arg(key_combo) + .output() + { + Ok(output) if output.status.success() => ShimResult { + success: true, + output: Some("Key code simulated".to_string()), + error: None, + }, + _ => ShimResult { + success: false, + output: None, + error: Some("Failed to simulate key code".to_string()), + }, + } + } + DisplayServer::Wayland => ShimResult { + success: false, + output: None, + error: Some("Key code simulation not yet supported on Wayland".to_string()), + }, + DisplayServer::Unknown => ShimResult { + success: false, + output: None, + error: Some("Cannot detect display server".to_string()), + }, + } + } + + fn modifiers_to_x11_keys(modifiers: &[Modifier]) -> String { + let mut keys = Vec::new(); + for modifier in modifiers { + match modifier { + Modifier::Command => keys.push("super"), + Modifier::Control => keys.push("ctrl"), + Modifier::Option => keys.push("alt"), + Modifier::Shift => keys.push("shift"), + } + } + keys.join("+") + } + + fn modifiers_to_wayland_keys(modifiers: &[Modifier]) -> String { + let mut keys = Vec::new(); + for modifier in modifiers { + match modifier { + Modifier::Command => keys.push("125"), // Left Super key code + Modifier::Control => keys.push("29"), // Left Ctrl key code + Modifier::Option => keys.push("56"), // Left Alt key code + Modifier::Shift => keys.push("42"), // Left Shift key code + } + } + keys.join(":") + } + + fn macos_keycode_to_linux(code: i32) -> String { + // Map common macOS key codes to Linux key names + match code { + 36 => "Return".to_string(), + 51 => "BackSpace".to_string(), + 53 => "Escape".to_string(), + 48 => "Tab".to_string(), + 49 => "space".to_string(), + 123 => "Left".to_string(), + 124 => "Right".to_string(), + 125 => "Down".to_string(), + 126 => "Up".to_string(), + 116 => "Page_Up".to_string(), + 121 => "Page_Down".to_string(), + 115 => "Home".to_string(), + 119 => "End".to_string(), + 117 => "Delete".to_string(), + _ => format!("KEY_{}", code), // Fallback for unknown codes + } + } + // ========== EXISTING PARSERS (keeping for backwards compatibility) ========== + fn extract_activate_app(script: &str) -> Option { + // Match: tell application "AppName" to activate + let patterns = [ + r#"tell application "([^"]+)" to activate"#, + r#"activate application "([^"]+)""#, + ]; + + for pattern in &patterns { + if let Some(caps) = regex::Regex::new(pattern).ok()?.captures(script) { + return caps.get(1).map(|m| m.as_str().to_string()); + } + } + None + } + + fn extract_quit_app(script: &str) -> Option { + // Match: tell application "AppName" to quit + let pattern = r#"tell application "([^"]+)" to quit"#; + regex::Regex::new(pattern) + .ok()? + .captures(script)? + .get(1) + .map(|m| m.as_str().to_string()) + } + + fn extract_notification(script: &str) -> Option<(String, String)> { + // Match: display notification "message" with title "title" + let pattern = r#"display notification "([^"]+)"(?:\s+with title "([^"]+)")?"#; + let caps = regex::Regex::new(pattern).ok()?.captures(script)?; + + let message = caps.get(1)?.as_str().to_string(); + let title = caps + .get(2) + .map(|m| m.as_str().to_string()) + .unwrap_or_else(|| "Notification".to_string()); + + Some((title, message)) + } + + fn extract_set_volume(script: &str) -> Option { + // Match: set volume N or set volume output volume N + let patterns = [r"set volume (\d+)", r"set volume output volume (\d+)"]; + + for pattern in &patterns { + if let Some(caps) = regex::Regex::new(pattern).ok()?.captures(script) { + return caps.get(1)?.as_str().parse().ok(); + } + } + None + } + + fn activate_application(app_name: &str) -> ShimResult { + // Try to launch the application using the desktop file + let desktop_name = app_name.to_lowercase(); + + // Try using gtk-launch (works on most desktop environments) + let output = Command::new("gtk-launch").arg(&desktop_name).output(); + + match output { + Ok(out) if out.status.success() => ShimResult { + success: true, + output: Some(format!("Activated application: {}", app_name)), + error: None, + }, + _ => { + // Fallback: try xdg-open + let fallback = Command::new("xdg-open").arg(&desktop_name).output(); + + match fallback { + Ok(out) if out.status.success() => ShimResult { + success: true, + output: Some(format!("Activated application: {}", app_name)), + error: None, + }, + _ => ShimResult { + success: false, + output: None, + error: Some(format!("Failed to activate application: {}", app_name)), + }, + } + } + } + } + + fn quit_application(app_name: &str) -> ShimResult { + // Try to quit the application using pkill + let process_name = app_name.to_lowercase(); + + let output = Command::new("pkill").arg("-f").arg(&process_name).output(); + + match output { + Ok(out) if out.status.success() => ShimResult { + success: true, + output: Some(format!("Quit application: {}", app_name)), + error: None, + }, + _ => ShimResult { + success: false, + output: None, + error: Some(format!("Failed to quit application: {}", app_name)), + }, + } + } + + fn show_notification(title: &str, message: &str) -> ShimResult { + // Use notify-send for freedesktop notifications + let output = Command::new("notify-send").arg(title).arg(message).output(); + + match output { + Ok(out) if out.status.success() => ShimResult { + success: true, + output: Some("Notification sent".to_string()), + error: None, + }, + _ => ShimResult { + success: false, + output: None, + error: Some("Failed to send notification".to_string()), + }, + } + } + + fn set_system_volume(volume: i32) -> ShimResult { + // Clamp volume to 0-100 + let vol = volume.clamp(0, 100); + + // Try using pactl (PulseAudio/PipeWire) + let output = Command::new("pactl") + .arg("set-sink-volume") + .arg("@DEFAULT_SINK@") + .arg(format!("{}%", vol)) + .output(); + + match output { + Ok(out) if out.status.success() => ShimResult { + success: true, + output: Some(format!("Set volume to {}%", vol)), + error: None, + }, + _ => { + // Fallback: try amixer (ALSA) + let fallback = Command::new("amixer") + .arg("set") + .arg("Master") + .arg(format!("{}%", vol)) + .output(); + + match fallback { + Ok(out) if out.status.success() => ShimResult { + success: true, + output: Some(format!("Set volume to {}%", vol)), + error: None, + }, + _ => ShimResult { + success: false, + output: None, + error: Some("Failed to set volume".to_string()), + }, + } + } + } + } +} + +/// System API shims for common macOS system operations +pub struct SystemShim; + +impl SystemShim { + /// Get system information that might be requested by extensions + pub fn get_system_info() -> HashMap { + let mut info = HashMap::new(); + + // Platform + info.insert("platform".to_string(), "linux".to_string()); + + // Architecture + if let Ok(output) = Command::new("uname").arg("-m").output() { + if let Ok(arch) = String::from_utf8(output.stdout) { + info.insert("arch".to_string(), arch.trim().to_string()); + } + } + + // Hostname + if let Ok(output) = Command::new("hostname").output() { + if let Ok(hostname) = String::from_utf8(output.stdout) { + info.insert("hostname".to_string(), hostname.trim().to_string()); + } + } + + // Desktop environment + if let Ok(de) = std::env::var("XDG_CURRENT_DESKTOP") { + info.insert("desktop_environment".to_string(), de); + } + + info + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_path_translation_applications() { + assert_eq!( + PathShim::translate_path("/Applications/Safari.app"), + "/usr/share/applications/safari.desktop" + ); + } + + #[test] + fn test_path_translation_library() { + assert_eq!( + PathShim::translate_path("/Library/Frameworks/Something"), + "/usr/lib/Frameworks/Something" + ); + } + + #[test] + fn test_path_translation_users() { + assert_eq!( + PathShim::translate_path("/Users/john/Documents"), + "/home/john/Documents" + ); + } + + #[test] + fn test_path_translation_user_library() { + assert_eq!( + PathShim::translate_path("~/Library/Application Support/MyApp"), + "~/.local/share/MyApp" + ); + } + + #[test] + fn test_extract_activate_app() { + let script = r#"tell application "Safari" to activate"#; + assert_eq!( + AppleScriptShim::extract_activate_app(script), + Some("Safari".to_string()) + ); + } + + #[test] + fn test_extract_notification() { + let script = r#"display notification "Hello World" with title "Test""#; + assert_eq!( + AppleScriptShim::extract_notification(script), + Some(("Test".to_string(), "Hello World".to_string())) + ); + } + + // ========== NEW PRIORITY 1 TESTS ========== + + #[test] + fn test_extract_shell_script() { + let script = r#"do shell script "echo hello""#; + assert_eq!( + AppleScriptShim::extract_shell_script(script), + Some(("echo hello".to_string(), false)) + ); + } + + #[test] + fn test_extract_shell_script_with_sudo() { + let script = r#"do shell script "whoami" with administrator privileges"#; + assert_eq!( + AppleScriptShim::extract_shell_script(script), + Some(("whoami".to_string(), true)) + ); + } + + #[test] + fn test_run_shell_script() { + let result = AppleScriptShim::run_shell_script("echo hello", false); + assert!(result.success); + assert!(result.output.is_some()); + assert!(result.output.unwrap().contains("hello")); + } + + #[test] + fn test_extract_open_location_url() { + let script = r#"open location "https://google.com""#; + assert_eq!( + AppleScriptShim::extract_open_location(script), + Some("https://google.com".to_string()) + ); + } + + #[test] + fn test_extract_open_location_file() { + let script = r#"open "/tmp/test.txt""#; + assert_eq!( + AppleScriptShim::extract_open_location(script), + Some("/tmp/test.txt".to_string()) + ); + } + + #[test] + fn test_extract_open_finder() { + let script = r#"tell application "Finder" to open "/Users/test/Documents""#; + assert_eq!( + AppleScriptShim::extract_open_location(script), + Some("/Users/test/Documents".to_string()) + ); + } + + #[test] + fn test_extract_set_clipboard() { + let script = r#"set the clipboard to "hello world""#; + assert_eq!( + AppleScriptShim::extract_set_clipboard(script), + Some("hello world".to_string()) + ); + } + + #[test] + fn test_is_get_clipboard() { + assert!(AppleScriptShim::is_get_clipboard("get the clipboard")); + assert!(AppleScriptShim::is_get_clipboard("the clipboard")); + assert!(!AppleScriptShim::is_get_clipboard("set the clipboard")); + } + + #[test] + fn test_detect_display_server() { + // This test will pass regardless of what display server is running + let display = AppleScriptShim::detect_display_server(); + assert!(matches!( + display, + DisplayServer::X11 | DisplayServer::Wayland | DisplayServer::Unknown + )); + } + + // Integration test: end-to-end shell script execution + #[test] + fn test_run_apple_script_shell() { + let script = r#"do shell script "echo 'test output'""#; + let result = AppleScriptShim::run_apple_script(script); + assert!(result.success); + assert!(result.output.is_some()); + } + + // Integration test: notification fallback when pattern not supported + #[test] + fn test_run_apple_script_unsupported() { + let script = r#"tell application "SystemUIServer" to do something complex"#; + let result = AppleScriptShim::run_apple_script(script); + assert!(!result.success); + assert!(result.error.is_some()); + assert!(result.error.unwrap().contains("not supported")); + } + + // ========== NEW PRIORITY 2 TESTS (GUI AUTOMATION) ========== + + #[test] + fn test_extract_keystroke_simple() { + let script = r#"keystroke "hello""#; + assert_eq!( + AppleScriptShim::extract_keystroke(script), + Some(("hello".to_string(), Vec::new())) + ); + } + + #[test] + fn test_extract_keystroke_with_modifiers() { + let script = r#"keystroke "c" using {command down}"#; + let result = AppleScriptShim::extract_keystroke(script); + assert!(result.is_some()); + let (text, modifiers) = result.unwrap(); + assert_eq!(text, "c"); + assert_eq!(modifiers.len(), 1); + assert_eq!(modifiers[0], Modifier::Command); + } + + #[test] + fn test_extract_keystroke_multiple_modifiers() { + let script = r#"keystroke "v" using {command down, shift down}"#; + let result = AppleScriptShim::extract_keystroke(script); + assert!(result.is_some()); + let (text, modifiers) = result.unwrap(); + assert_eq!(text, "v"); + assert!(modifiers.contains(&Modifier::Command)); + assert!(modifiers.contains(&Modifier::Shift)); + } + + #[test] + fn test_extract_keycode() { + let script = r#"key code 36"#; + assert_eq!( + AppleScriptShim::extract_keycode(script), + Some((36, Vec::new())) + ); + } + + #[test] + fn test_extract_keycode_with_modifiers() { + let script = r#"key code 36 using {command down}"#; + let result = AppleScriptShim::extract_keycode(script); + assert!(result.is_some()); + let (code, modifiers) = result.unwrap(); + assert_eq!(code, 36); + assert_eq!(modifiers, vec![Modifier::Command]); + } + + #[test] + fn test_parse_modifiers() { + let mods = AppleScriptShim::parse_modifiers("command down, shift down"); + assert_eq!(mods.len(), 2); + assert!(mods.contains(&Modifier::Command)); + assert!(mods.contains(&Modifier::Shift)); + } + + #[test] + fn test_macos_keycode_to_linux() { + assert_eq!(AppleScriptShim::macos_keycode_to_linux(36), "Return"); + assert_eq!(AppleScriptShim::macos_keycode_to_linux(51), "BackSpace"); + assert_eq!(AppleScriptShim::macos_keycode_to_linux(53), "Escape"); + } + + #[test] + fn test_modifiers_to_x11_keys() { + let mods = vec![Modifier::Command, Modifier::Shift]; + assert_eq!(AppleScriptShim::modifiers_to_x11_keys(&mods), "super+shift"); + } +} diff --git a/src-tauri/src/extensions.rs b/src-tauri/src/extensions.rs index f8a27643..2e9d2a5e 100644 --- a/src-tauri/src/extensions.rs +++ b/src-tauri/src/extensions.rs @@ -7,11 +7,14 @@ use tauri::Manager; use zip::result::ZipError; use zip::ZipArchive; -#[derive(serde::Serialize, Clone)] +use crate::cli_substitutes; + +#[derive(serde::Serialize, serde::Deserialize, Clone, Debug)] #[serde(rename_all = "camelCase")] pub struct HeuristicViolation { - command_name: String, - reason: String, + pub command_name: String, + pub command_title: String, + pub reason: String, } #[derive(serde::Serialize, Clone)] @@ -22,15 +25,26 @@ pub enum InstallResult { } trait IncompatibilityHeuristic { - fn check(&self, command_title: &str, file_content: &str) -> Option; + fn check( + &self, + command_name: &str, + command_title: &str, + file_content: &str, + ) -> Option; } struct AppleScriptHeuristic; impl IncompatibilityHeuristic for AppleScriptHeuristic { - fn check(&self, command_title: &str, file_content: &str) -> Option { + fn check( + &self, + command_name: &str, + command_title: &str, + file_content: &str, + ) -> Option { if file_content.contains("runAppleScript") { Some(HeuristicViolation { - command_name: command_title.to_string(), + command_name: command_name.to_string(), + command_title: command_title.to_string(), reason: "Possible usage of AppleScript (runAppleScript)".to_string(), }) } else { @@ -41,12 +55,18 @@ impl IncompatibilityHeuristic for AppleScriptHeuristic { struct MacOSPathHeuristic; impl IncompatibilityHeuristic for MacOSPathHeuristic { - fn check(&self, command_title: &str, file_content: &str) -> Option { + fn check( + &self, + command_name: &str, + command_title: &str, + file_content: &str, + ) -> Option { let macos_paths = ["/Applications/", "/Library/", "/Users/"]; for path in macos_paths { if file_content.contains(path) { return Some(HeuristicViolation { - command_name: command_title.to_string(), + command_name: command_name.to_string(), + command_title: command_title.to_string(), reason: format!("Potential hardcoded macOS path: '{}'", path), }); } @@ -55,6 +75,91 @@ impl IncompatibilityHeuristic for MacOSPathHeuristic { } } +struct MacOSAPIHeuristic; +impl IncompatibilityHeuristic for MacOSAPIHeuristic { + fn check( + &self, + command_name: &str, + command_title: &str, + file_content: &str, + ) -> Option { + let macos_apis = [ + ("NSWorkspace", "macOS NSWorkspace API"), + ("NSApplication", "macOS NSApplication API"), + ("NSFileManager", "macOS NSFileManager API"), + ("com.apple.", "macOS-specific bundle identifier"), + ("tell app \"Finder\"", "macOS Finder AppleScript"), + ("tell application \"Finder\"", "macOS Finder AppleScript"), + ]; + + for (pattern, description) in macos_apis { + if file_content.contains(pattern) { + return Some(HeuristicViolation { + command_name: command_name.to_string(), + command_title: command_title.to_string(), + reason: format!("Uses {}", description), + }); + } + } + None + } +} + +struct ShellCommandHeuristic; +impl IncompatibilityHeuristic for ShellCommandHeuristic { + fn check( + &self, + command_name: &str, + command_title: &str, + file_content: &str, + ) -> Option { + let macos_commands = [ + ("osascript", "macOS osascript command"), + ("open -a", "macOS application launcher"), + ("mdfind", "macOS Spotlight search"), + ("mdls", "macOS Spotlight metadata"), + ("defaults read", "macOS preferences system"), + ("defaults write", "macOS preferences system"), + ]; + + for (pattern, description) in macos_commands { + if file_content.contains(pattern) { + return Some(HeuristicViolation { + command_name: command_name.to_string(), + command_title: command_title.to_string(), + reason: format!("Uses {}", description), + }); + } + } + None + } +} + +/// Magic bytes for detecting Mach-O binaries (macOS executables) +/// - MH_MAGIC (32-bit): 0xFEEDFACE +/// - MH_CIGAM (32-bit, byte-swapped): 0xCEFAEDFE +/// - MH_MAGIC_64 (64-bit): 0xFEEDFACF +/// - MH_CIGAM_64 (64-bit, byte-swapped): 0xCFFAEDFE +/// - FAT_MAGIC (universal binary): 0xCAFEBABE +/// - FAT_CIGAM (universal, byte-swapped): 0xBEBAFECA +const MACH_O_MAGIC_BYTES: &[[u8; 4]] = &[ + [0xFE, 0xED, 0xFA, 0xCE], // MH_MAGIC + [0xCE, 0xFA, 0xED, 0xFE], // MH_CIGAM + [0xFE, 0xED, 0xFA, 0xCF], // MH_MAGIC_64 + [0xCF, 0xFA, 0xED, 0xFE], // MH_CIGAM_64 + [0xCA, 0xFE, 0xBA, 0xBE], // FAT_MAGIC (universal binary) + [0xBE, 0xBA, 0xFE, 0xCA], // FAT_CIGAM +]; + +/// Check if the first 4 bytes of data indicate a Mach-O binary +fn is_macho_binary(data: &[u8]) -> bool { + if data.len() < 4 { + return false; + } + let header: [u8; 4] = [data[0], data[1], data[2], data[3]]; + MACH_O_MAGIC_BYTES.contains(&header) +} + fn get_extension_dir(app: &tauri::AppHandle, slug: &str) -> Result { let data_dir = app .path() @@ -100,10 +205,17 @@ fn find_common_prefix(file_names: &[PathBuf]) -> Option { }) } +#[derive(Clone)] +struct CommandToCheck { + path_in_archive: String, + command_name: String, + command_title: String, +} + fn get_commands_from_package_json( archive: &mut ZipArchive>, prefix: &Option, -) -> Result, String> { +) -> Result, String> { let package_json_path = if let Some(ref p) = prefix { p.join("package.json") } else { @@ -146,42 +258,195 @@ fn get_commands_from_package_json( PathBuf::from(src_path) }; - Some(( - command_file_path_in_archive.to_string_lossy().into_owned(), + Some(CommandToCheck { + path_in_archive: command_file_path_in_archive.to_string_lossy().into_owned(), + command_name: command_name.to_string(), command_title, - )) + }) }) .collect()) } -fn run_heuristic_checks(archive_data: &bytes::Bytes) -> Result, String> { - let heuristics: Vec> = - vec![Box::new(AppleScriptHeuristic), Box::new(MacOSPathHeuristic)]; - if heuristics.is_empty() { - return Ok(vec![]); - } +/// Result from heuristic checks, including detected Mach-O binaries for substitution +struct HeuristicResult { + violations: Vec, + macho_binaries: Vec, +} + +fn run_heuristic_checks(archive_data: &bytes::Bytes) -> Result { + let heuristics: Vec> = vec![ + Box::new(AppleScriptHeuristic), + Box::new(MacOSPathHeuristic), + Box::new(MacOSAPIHeuristic), + Box::new(ShellCommandHeuristic), + ]; let mut archive = ZipArchive::new(Cursor::new(archive_data.clone())).map_err(|e| e.to_string())?; let file_names: Vec = archive.file_names().map(PathBuf::from).collect(); let prefix = find_common_prefix(&file_names); - let commands_to_check = get_commands_from_package_json(&mut archive, &prefix)?; let mut violations = Vec::new(); - for (path_in_archive, command_title) in commands_to_check { - if let Ok(mut command_file) = archive.by_name(&path_in_archive) { + // Check for Mach-O binaries in assets folder + let mut macho_binaries_found: Vec = Vec::new(); + for i in 0..archive.len() { + if let Ok(mut file) = archive.by_index(i) { + let file_path = file.name().to_string(); + + // Skip directories and common non-binary files + if file.is_dir() + || file_path.ends_with(".js") + || file_path.ends_with(".json") + || file_path.ends_with(".md") + || file_path.ends_with(".txt") + || file_path.ends_with(".png") + || file_path.ends_with(".svg") + || file_path.ends_with(".jpg") + || file_path.ends_with(".gif") + || file_path.ends_with(".css") + || file_path.ends_with(".html") + { + continue; + } + + // Read first 4 bytes to check for Mach-O magic + let mut header = [0u8; 4]; + if file.read_exact(&mut header).is_ok() && is_macho_binary(&header) { + // Get just the filename for the warning message + let binary_name = Path::new(&file_path) + .file_name() + .and_then(|n| n.to_str()) + .unwrap_or(&file_path) + .to_string(); + macho_binaries_found.push(binary_name); + } + } + } + + // Add a single violation for all Mach-O binaries found + if !macho_binaries_found.is_empty() { + let binary_list = if macho_binaries_found.len() <= 3 { + macho_binaries_found.join(", ") + } else { + format!( + "{} and {} more", + macho_binaries_found[..3].join(", "), + macho_binaries_found.len() - 3 + ) + }; + violations.push(HeuristicViolation { + command_name: "_extension".to_string(), + command_title: "Extension Assets".to_string(), + reason: format!( + "Contains macOS-only binary files that won't work on Linux: {}", + binary_list + ), + }); + } + + // Re-open archive for command checks (since we consumed it above) + let mut archive = + ZipArchive::new(Cursor::new(archive_data.clone())).map_err(|e| e.to_string())?; + + // Check command source files for incompatibility patterns + let commands_to_check = get_commands_from_package_json(&mut archive, &prefix)?; + for command_meta in commands_to_check { + if let Ok(mut command_file) = archive.by_name(&command_meta.path_in_archive) { let mut content = String::new(); if command_file.read_to_string(&mut content).is_ok() { for heuristic in &heuristics { - if let Some(violation) = heuristic.check(&command_title, &content) { + if let Some(violation) = heuristic.check( + &command_meta.command_name, + &command_meta.command_title, + &content, + ) { violations.push(violation); } } } } } - Ok(violations) + Ok(HeuristicResult { + violations, + macho_binaries: macho_binaries_found, + }) +} + +const COMPATIBILITY_FILE_NAME: &str = "compatibility.json"; + +#[derive(Serialize, Deserialize, Default)] +struct CompatibilityMetadata { + #[serde(default)] + warnings: Vec, + #[serde(default = "default_compatibility_score")] + compatibility_score: u8, +} + +fn default_compatibility_score() -> u8 { + 100 +} + +/// Calculate compatibility score (0-100) based on detected violations +/// Higher score = better Linux compatibility +fn calculate_compatibility_score(violations: &[HeuristicViolation]) -> u8 { + let mut score: i32 = 100; + + for violation in violations { + // Deduct points based on severity of the issue + if violation.reason.contains("macOS-only binary") { + // Mach-O binaries are a major blocker + score -= 40; + } else if violation.reason.contains("macOS NSWorkspace API") + || violation.reason.contains("macOS NSApplication API") + || violation.reason.contains("macOS NSFileManager API") + || violation.reason.contains("macOS Finder AppleScript") + { + // macOS-specific APIs likely won't work + score -= 20; + } else if violation.reason.contains("AppleScript") { + // AppleScript is shimmed but has limitations + score -= 15; + } else if violation.reason.contains("macOS path") { + // Paths can be translated + score -= 10; + } else if violation.reason.contains("osascript") + || violation.reason.contains("mdfind") + || violation.reason.contains("mdls") + || violation.reason.contains("defaults") + || violation.reason.contains("open -a") + { + // Shell commands are platform-specific + score -= 5; + } + } + + // Clamp to 0-100 range + score.max(0).min(100) as u8 +} + +fn save_compatibility_metadata( + plugin_dir: &Path, + warnings: &[HeuristicViolation], +) -> Result<(), String> { + let compatibility_score = calculate_compatibility_score(warnings); + let metadata = CompatibilityMetadata { + warnings: warnings.to_vec(), + compatibility_score, + }; + let data = serde_json::to_string_pretty(&metadata).map_err(|e| e.to_string())?; + fs::write(plugin_dir.join(COMPATIBILITY_FILE_NAME), data).map_err(|e| e.to_string()) +} + +fn load_compatibility_metadata(plugin_dir: &Path) -> Result { + let path = plugin_dir.join(COMPATIBILITY_FILE_NAME); + if !path.exists() { + return Ok(CompatibilityMetadata::default()); + } + + let data = fs::read_to_string(path).map_err(|e| e.to_string())?; + let parsed: CompatibilityMetadata = serde_json::from_str(&data).map_err(|e| e.to_string())?; + Ok(parsed) } fn extract_archive(archive_data: &bytes::Bytes, target_dir: &Path) -> Result<(), String> { @@ -310,6 +575,8 @@ pub struct PluginInfo { pub mode: Option, pub author: Option, pub owner: Option, + pub compatibility_warnings: Option>, + pub compatibility_score: Option, } pub fn discover_plugins(app: &tauri::AppHandle) -> Result, String> { @@ -337,16 +604,17 @@ pub fn discover_plugins(app: &tauri::AppHandle) -> Result, Strin let package_json_path = plugin_dir.join("package.json"); if !package_json_path.exists() { - eprintln!("Plugin {} has no package.json, skipping", plugin_dir_name); + tracing::warn!(plugin = %plugin_dir_name, "Plugin has no package.json, skipping"); continue; } let package_json_content = match fs::read_to_string(&package_json_path) { Ok(content) => content, Err(e) => { - eprintln!( - "Error reading package.json for plugin {}: {}", - plugin_dir_name, e + tracing::warn!( + plugin = %plugin_dir_name, + error = %e, + "Error reading package.json for plugin" ); continue; } @@ -355,18 +623,37 @@ pub fn discover_plugins(app: &tauri::AppHandle) -> Result, Strin let package_json: PackageJson = match serde_json::from_str(&package_json_content) { Ok(json) => json, Err(e) => { - eprintln!( - "Error parsing package.json for plugin {}: {}", - plugin_dir_name, e + tracing::warn!( + plugin = %plugin_dir_name, + error = %e, + "Error parsing package.json for plugin" ); continue; } }; + let compatibility_metadata = match load_compatibility_metadata(&plugin_dir) { + Ok(data) => data, + Err(err) => { + tracing::warn!( + plugin = %plugin_dir_name, + error = %err, + "Failed to load compatibility metadata" + ); + CompatibilityMetadata::default() + } + }; + if let Some(commands) = package_json.commands { for command in commands { let command_file_path = plugin_dir.join(format!("{}.js", command.name)); if command_file_path.exists() { + let warnings: Vec = compatibility_metadata + .warnings + .iter() + .filter(|warning| warning.command_name == command.name) + .cloned() + .collect(); let plugin_info = PluginInfo { title: command .title @@ -391,13 +678,19 @@ pub fn discover_plugins(app: &tauri::AppHandle) -> Result, Strin mode: command.mode, author: package_json.author.clone(), owner: package_json.owner.clone(), + compatibility_warnings: if warnings.is_empty() { + None + } else { + Some(warnings) + }, + compatibility_score: Some(compatibility_metadata.compatibility_score), }; plugins.push(plugin_info); } else { - eprintln!( - "Command file {} not found for command {}", - command_file_path.display(), - command.name + tracing::warn!( + command = %command.name, + path = %command_file_path.display(), + "Command file not found" ); } } @@ -417,14 +710,112 @@ pub async fn install_extension( let extension_dir = get_extension_dir(&app, &slug)?; let content = download_archive(&download_url).await?; - if !force { - let violations = run_heuristic_checks(&content)?; - if !violations.is_empty() { - return Ok(InstallResult::RequiresConfirmation { violations }); - } + let heuristic_result = run_heuristic_checks(&content)?; + if !heuristic_result.violations.is_empty() && !force { + return Ok(InstallResult::RequiresConfirmation { + violations: heuristic_result.violations.clone(), + }); } extract_archive(&content, &extension_dir)?; + // Attempt to substitute macOS binaries with Linux equivalents + if !heuristic_result.macho_binaries.is_empty() { + match cli_substitutes::substitute_macos_binaries( + &extension_dir, + &heuristic_result.macho_binaries, + ) + .await + { + Ok(substituted) => { + if !substituted.is_empty() { + tracing::info!( + count = substituted.len(), + "Successfully substituted macOS binaries with Linux versions" + ); + } + } + Err(e) => { + tracing::warn!(error = %e, "Failed to substitute some binaries"); + } + } + } + + save_compatibility_metadata(&extension_dir, &heuristic_result.violations)?; + Ok(InstallResult::Success) } + +#[derive(Serialize, Clone)] +#[serde(rename_all = "camelCase")] +pub struct CompatibilityInfo { + pub slug: String, + pub compatibility_score: u8, + pub warnings: Vec, +} + +#[tauri::command] +pub fn get_extension_compatibility( + app: tauri::AppHandle, + slug: String, +) -> Result { + let extension_dir = get_extension_dir(&app, &slug)?; + let metadata = load_compatibility_metadata(&extension_dir)?; + + Ok(CompatibilityInfo { + slug, + compatibility_score: metadata.compatibility_score, + warnings: metadata.warnings, + }) +} + +#[tauri::command] +pub fn get_all_extensions_compatibility( + app: tauri::AppHandle, +) -> Result, String> { + let plugins_base_dir = get_extension_dir(&app, "")?; + let mut results = Vec::new(); + + if !plugins_base_dir.exists() { + return Ok(results); + } + + let plugin_dirs = fs::read_dir(plugins_base_dir) + .map_err(|e| e.to_string())? + .filter_map(Result::ok) + .filter(|entry| entry.path().is_dir()); + + for plugin_dir_entry in plugin_dirs { + let plugin_dir = plugin_dir_entry.path(); + let slug = plugin_dir + .file_name() + .and_then(|s| s.to_str()) + .unwrap_or_default() + .to_string(); + + if let Ok(metadata) = load_compatibility_metadata(&plugin_dir) { + results.push(CompatibilityInfo { + slug, + compatibility_score: metadata.compatibility_score, + warnings: metadata.warnings, + }); + } + } + + Ok(results) +} + +#[tauri::command] +pub fn uninstall_extension(app: tauri::AppHandle, slug: String) -> Result<(), String> { + let extension_dir = get_extension_dir(&app, &slug)?; + + if !extension_dir.exists() { + return Err(format!("Extension '{}' is not installed", slug)); + } + + fs::remove_dir_all(&extension_dir) + .map_err(|e| format!("Failed to uninstall extension: {}", e))?; + + tracing::info!(slug = %slug, "Extension uninstalled successfully"); + Ok(()) +} diff --git a/src-tauri/src/file_search/indexer.rs b/src-tauri/src/file_search/indexer.rs index 15509bd3..e4c1e6e9 100644 --- a/src-tauri/src/file_search/indexer.rs +++ b/src-tauri/src/file_search/indexer.rs @@ -1,77 +1,132 @@ use super::{manager::FileSearchManager, types::IndexedFile}; -use std::{env, time::SystemTime}; +use std::{env, path::PathBuf, time::SystemTime}; use tauri::{AppHandle, Manager}; use walkdir::{DirEntry, WalkDir}; pub async fn build_initial_index(app_handle: AppHandle) { - println!("Starting initial file index build."); + tracing::info!("Starting initial file index build"); let manager = app_handle.state::(); let home_dir = match env::var("HOME") { Ok(path) => path, Err(e) => { - eprintln!("Failed to get home directory: {}", e); + tracing::error!(error = %e, "Failed to get home directory"); return; } }; - let walker = WalkDir::new(home_dir).into_iter(); - for entry in walker.filter_entry(|e| !is_hidden(e) && !is_excluded(e)) { - let entry = match entry { - Ok(entry) => entry, - Err(e) => { - eprintln!("Error walking directory: {}", e); - continue; + // Index only specific directories, not entire home + let index_dirs = [ + "Documents", + "Downloads", + "Desktop", + "Pictures", + "Videos", + "Music", + "Projects", + "Code", + "dev", + "workspace", + ]; + + // Load all existing file timestamps in a single query to avoid N+1 problem + let existing_files = match manager.get_all_file_timestamps() { + Ok(timestamps) => timestamps, + Err(e) => { + tracing::error!(error = %e, "Failed to load existing file timestamps"); + std::collections::HashMap::new() + } + }; + + let mut total_indexed = 0; + for dir_name in &index_dirs { + let dir_path = PathBuf::from(&home_dir).join(dir_name); + if !dir_path.exists() || !dir_path.is_dir() { + continue; + } + + tracing::info!(path = %dir_path.display(), "Indexing directory"); + + // Collect files to add in batches for better performance + let mut files_to_add = Vec::new(); + + let walker = WalkDir::new(&dir_path).into_iter(); + for entry in walker.filter_entry(|e| !is_hidden(e) && !is_excluded(e)) { + let entry = match entry { + Ok(entry) => entry, + Err(e) => { + tracing::warn!(error = %e, "Error walking directory"); + continue; + } + }; + + let path = entry.path(); + let metadata = match entry.metadata() { + Ok(meta) => meta, + Err(_) => continue, + }; + + let last_modified_secs = metadata + .modified() + .unwrap_or(SystemTime::UNIX_EPOCH) + .duration_since(SystemTime::UNIX_EPOCH) + .unwrap_or_default() + .as_secs() as i64; + + // Use in-memory HashMap lookup instead of database query + if let Some(&indexed_time) = existing_files.get(&path.to_string_lossy().to_string()) { + if indexed_time >= last_modified_secs { + if path.is_dir() { + // continue to walk children + } else { + // skip this file + continue; + } + } } - }; - - let path = entry.path(); - let metadata = match entry.metadata() { - Ok(meta) => meta, - Err(_) => continue, - }; - - let last_modified_secs = metadata - .modified() - .unwrap_or(SystemTime::UNIX_EPOCH) - .duration_since(SystemTime::UNIX_EPOCH) - .unwrap_or_default() - .as_secs() as i64; - - if let Ok(Some(indexed_time)) = manager.get_file_last_modified(&path.to_string_lossy()) { - if indexed_time >= last_modified_secs { - if path.is_dir() { - // continue to walk children + + let file_type = if metadata.is_dir() { + "directory".to_string() + } else if metadata.is_file() { + "file".to_string() + } else { + continue; + }; + + let indexed_file = IndexedFile { + path: path.to_string_lossy().to_string(), + name: entry.file_name().to_string_lossy().to_string(), + parent_path: path + .parent() + .map(|p| p.to_string_lossy().to_string()) + .unwrap_or_default(), + file_type, + last_modified: last_modified_secs, + }; + + files_to_add.push(indexed_file); + + // Batch insert every 1000 files to avoid holding too much memory + if files_to_add.len() >= 1000 { + if let Err(e) = manager.batch_add_files(&files_to_add) { + tracing::error!(error = ?e, "Failed to batch add files"); } else { - // skip this file - continue; + total_indexed += files_to_add.len(); } + files_to_add.clear(); } } - let file_type = if metadata.is_dir() { - "directory".to_string() - } else if metadata.is_file() { - "file".to_string() - } else { - continue; - }; - - let indexed_file = IndexedFile { - path: path.to_string_lossy().to_string(), - name: entry.file_name().to_string_lossy().to_string(), - parent_path: path - .parent() - .map(|p| p.to_string_lossy().to_string()) - .unwrap_or_default(), - file_type, - last_modified: last_modified_secs, - }; - - if let Err(e) = manager.add_file(&indexed_file) { - eprintln!("Failed to add file to index: {:?}", e); + // Insert any remaining files + if !files_to_add.is_empty() { + if let Err(e) = manager.batch_add_files(&files_to_add) { + tracing::error!(error = ?e, "Failed to batch add remaining files"); + } else { + total_indexed += files_to_add.len(); + } } } - println!("Finished initial file index build."); + + tracing::info!(count = total_indexed, "Finished initial file index build"); } fn is_hidden(entry: &DirEntry) -> bool { @@ -87,18 +142,36 @@ fn is_excluded(entry: &DirEntry) -> bool { let excluded_dirs = [ "node_modules", ".git", + ".svn", "target", + "build", ".vscode", ".idea", "__pycache__", + ".pytest_cache", + ".mypy_cache", ".cache", + ".local/share/Trash", + ".gradle", + ".wine", + ".wine-qoder", + ".npm", + ".cargo", + ".rustup", + ".pnpm-store", + "venv", + ".venv", "Library", "Application Support", "AppData", ]; path.components().any(|component| { - excluded_dirs - .iter() - .any(|&excluded| component.as_os_str() == excluded) + if let Some(name) = component.as_os_str().to_str() { + excluded_dirs + .iter() + .any(|&excluded| name == excluded || name.starts_with(&format!("{}.", excluded))) + } else { + false + } }) } diff --git a/src-tauri/src/file_search/manager.rs b/src-tauri/src/file_search/manager.rs index 700990c7..78b938c4 100644 --- a/src-tauri/src/file_search/manager.rs +++ b/src-tauri/src/file_search/manager.rs @@ -32,7 +32,7 @@ impl FileSearchManager { } pub fn init_db(&self) -> RusqliteResult<()> { - let db = self.db.lock().unwrap(); + let db = self.db.lock().expect("file search db mutex poisoned"); db.execute( "CREATE TABLE IF NOT EXISTS file_index ( @@ -83,7 +83,7 @@ impl FileSearchManager { } pub fn add_file(&self, file: &IndexedFile) -> Result<(), AppError> { - let db = self.db.lock().unwrap(); + let db = self.db.lock().expect("file search db mutex poisoned"); db.execute( "INSERT OR REPLACE INTO file_index (path, name, parent_path, file_type, last_modified) VALUES (?1, ?2, ?3, ?4, ?5)", @@ -98,14 +98,45 @@ impl FileSearchManager { Ok(()) } + /// Batch add files in a single transaction for much better performance + pub fn batch_add_files(&self, files: &[IndexedFile]) -> Result<(), AppError> { + if files.is_empty() { + return Ok(()); + } + + let mut db = self.db.lock().expect("file search db mutex poisoned"); + let tx = db.transaction()?; + + { + let mut stmt = tx.prepare( + "INSERT OR REPLACE INTO file_index (path, name, parent_path, file_type, last_modified) + VALUES (?1, ?2, ?3, ?4, ?5)" + )?; + + for file in files { + stmt.execute(params![ + file.path, + file.name, + file.parent_path, + file.file_type, + file.last_modified + ])?; + } + } + + tx.commit()?; + Ok(()) + } + pub fn remove_file(&self, path: &str) -> Result<(), AppError> { - let db = self.db.lock().unwrap(); + let db = self.db.lock().expect("file search db mutex poisoned"); db.execute("DELETE FROM file_index WHERE path = ?1", params![path])?; Ok(()) } + #[allow(dead_code)] pub fn get_file_last_modified(&self, path: &str) -> Result, AppError> { - let db = self.db.lock().unwrap(); + let db = self.db.lock().expect("file search db mutex poisoned"); let last_modified: Result, rusqlite::Error> = db .query_row( "SELECT last_modified FROM file_index WHERE path = ?1", @@ -117,8 +148,28 @@ impl FileSearchManager { Ok(last_modified?) } + /// Get all file timestamps in a single query to avoid N+1 problem during indexing + pub fn get_all_file_timestamps( + &self, + ) -> Result, AppError> { + let db = self.db.lock().expect("file search db mutex poisoned"); + let mut stmt = db.prepare("SELECT path, last_modified FROM file_index")?; + + let timestamps_iter = stmt.query_map([], |row| { + Ok((row.get::<_, String>(0)?, row.get::<_, i64>(1)?)) + })?; + + let mut timestamps = std::collections::HashMap::new(); + for result in timestamps_iter { + let (path, last_modified) = result?; + timestamps.insert(path, last_modified); + } + + Ok(timestamps) + } + pub fn search_files(&self, term: &str, limit: u32) -> Result, AppError> { - let db = self.db.lock().unwrap(); + let db = self.db.lock().expect("file search db mutex poisoned"); let mut stmt = db.prepare( "SELECT t1.path, t1.name, t1.parent_path, t1.file_type, t1.last_modified FROM file_index t1 JOIN file_index_fts t2 ON t1.rowid = t2.rowid diff --git a/src-tauri/src/file_search/mod.rs b/src-tauri/src/file_search/mod.rs index f41046e8..9860e230 100644 --- a/src-tauri/src/file_search/mod.rs +++ b/src-tauri/src/file_search/mod.rs @@ -18,13 +18,13 @@ pub fn init(app_handle: AppHandle) { let file_search_manager = match FileSearchManager::new(app_handle.clone()) { Ok(manager) => manager, Err(e) => { - eprintln!("Failed to create FileSearchManager: {:?}", e); + tracing::error!(error = ?e, "Failed to create FileSearchManager"); return; } }; if let Err(e) = file_search_manager.init_db() { - eprintln!("Failed to initialize file search database: {:?}", e); + tracing::error!(error = ?e, "Failed to initialize file search database"); return; } @@ -38,7 +38,7 @@ pub fn init(app_handle: AppHandle) { let watcher_handle = app_handle.clone(); tauri::async_runtime::spawn(async move { if let Err(e) = watcher::start_watching(watcher_handle).await { - eprintln!("Failed to start file watcher: {:?}", e); + tracing::error!(error = ?e, "Failed to start file watcher"); } }); } diff --git a/src-tauri/src/file_search/watcher.rs b/src-tauri/src/file_search/watcher.rs index aec33748..99d44dfc 100644 --- a/src-tauri/src/file_search/watcher.rs +++ b/src-tauri/src/file_search/watcher.rs @@ -4,15 +4,57 @@ use notify::{RecursiveMode, Watcher}; use notify_debouncer_full::{new_debouncer, DebounceEventResult, DebouncedEvent}; use std::{ env, - path::PathBuf, + path::{Path, PathBuf}, time::{Duration, SystemTime}, }; use tauri::{AppHandle, Manager}; +/// Directories to exclude from file watching +const EXCLUDED_DIRS: &[&str] = &[ + ".wine", + ".wine-qoder", + ".cache", + ".local/share/Trash", + ".gradle", + "node_modules", + ".npm", + ".cargo", + ".rustup", + ".pnpm-store", + "target", + "build", + ".git", + ".svn", + ".venv", + "__pycache__", + ".pytest_cache", + ".mypy_cache", + "venv", +]; + +/// Check if a path should be excluded from watching +fn should_exclude_path(path: &Path) -> bool { + path.components().any(|component| { + if let std::path::Component::Normal(os_str) = component { + if let Some(name) = os_str.to_str() { + return EXCLUDED_DIRS.iter().any(|excluded| { + name == *excluded || name.starts_with(&format!("{}.", excluded)) + }); + } + } + false + }) +} + async fn handle_event(app_handle: AppHandle, debounced_event: DebouncedEvent) { let manager = app_handle.state::(); let path = &debounced_event.event.paths[0]; + // Skip excluded paths + if should_exclude_path(path) { + return; + } + if path.exists() { if let Ok(metadata) = path.metadata() { let file_type = if metadata.is_dir() { @@ -41,18 +83,18 @@ async fn handle_event(app_handle: AppHandle, debounced_event: DebouncedEvent) { last_modified, }; if let Err(e) = manager.add_file(&indexed_file) { - eprintln!( - "Failed to add/update file in index: {:?}, path: {}", - e, - path.display() + tracing::error!( + error = ?e, + path = %path.display(), + "Failed to add/update file in index" ); } } } else if let Err(e) = manager.remove_file(&path.to_string_lossy()) { - eprintln!( - "Failed to remove file from index: {:?}, path: {}", - e, - path.display() + tracing::error!( + error = ?e, + path = %path.display(), + "Failed to remove file from index" ); } } @@ -74,7 +116,7 @@ pub async fn start_watching(app_handle: AppHandle) -> Result<(), AppError> { } Err(errors) => { for error in errors { - eprintln!("watch error: {:?}", error); + tracing::error!(error = ?error, "File watch error"); } } } @@ -82,14 +124,43 @@ pub async fn start_watching(app_handle: AppHandle) -> Result<(), AppError> { ) .map_err(|e| AppError::FileSearch(e.to_string()))?; - debouncer - .watcher() - .watch(&PathBuf::from(&home_dir), RecursiveMode::Recursive) - .map_err(|e| AppError::FileSearch(e.to_string()))?; + // Watch only specific common directories instead of entire home + let watch_dirs = [ + "Documents", + "Downloads", + "Desktop", + "Pictures", + "Videos", + "Music", + "Projects", + "Code", + "dev", + "workspace", + ]; - debouncer - .cache() - .add_root(&PathBuf::from(&home_dir), RecursiveMode::Recursive); + let mut watch_count = 0; + for dir_name in &watch_dirs { + let dir_path = PathBuf::from(&home_dir).join(dir_name); + if dir_path.exists() && dir_path.is_dir() { + if let Err(e) = debouncer + .watcher() + .watch(&dir_path, RecursiveMode::Recursive) + { + tracing::error!(error = ?e, path = %dir_path.display(), "Failed to watch directory"); + } else { + debouncer + .cache() + .add_root(&dir_path, RecursiveMode::Recursive); + watch_count += 1; + } + } + } + + if watch_count == 0 { + tracing::warn!("No directories are being watched for file search"); + } else { + tracing::info!(count = watch_count, "Watching directories for file changes"); + } app_handle.manage(debouncer); diff --git a/src-tauri/src/floating_notes.rs b/src-tauri/src/floating_notes.rs new file mode 100644 index 00000000..e0267424 --- /dev/null +++ b/src-tauri/src/floating_notes.rs @@ -0,0 +1,103 @@ +use crate::error::AppError; +use rusqlite::params; +use std::sync::{Arc, Mutex}; +use tauri::{AppHandle, Manager, State, WebviewUrl, WebviewWindowBuilder}; + +const NOTES_SCHEMA: &str = "CREATE TABLE IF NOT EXISTS floating_notes ( + id TEXT PRIMARY KEY, + content TEXT NOT NULL, + updated_at INTEGER NOT NULL +)"; + +pub struct FloatingNotesManager { + store: Arc>, +} + +impl FloatingNotesManager { + pub fn new(app_handle: &AppHandle) -> Result { + let app_dir = app_handle + .path() + .app_local_data_dir() + .map_err(|_| AppError::DirectoryNotFound)?; + + if !app_dir.exists() { + std::fs::create_dir_all(&app_dir)?; + } + + let db_path = app_dir.join("floating_notes.db"); + let store = rusqlite::Connection::open(db_path)?; + + store.execute(NOTES_SCHEMA, [])?; + + Ok(Self { + store: Arc::new(Mutex::new(store)), + }) + } + + pub fn get_content(&self) -> Result { + let store = self.store.lock().expect("notes store mutex poisoned"); + let mut stmt = store.prepare("SELECT content FROM floating_notes WHERE id = 'main'")?; + + let result = stmt.query_row([], |row| row.get::<_, String>(0)); + + match result { + Ok(content) => Ok(content), + Err(rusqlite::Error::QueryReturnedNoRows) => Ok(String::new()), + Err(e) => Err(AppError::from(e)), + } + } + + pub fn save_content(&self, content: String) -> Result<(), AppError> { + let store = self.store.lock().expect("notes store mutex poisoned"); + let now = chrono::Utc::now().timestamp(); + + store.execute( + "INSERT OR REPLACE INTO floating_notes (id, content, updated_at) VALUES ('main', ?1, ?2)", + params![content, now], + )?; + Ok(()) + } +} + +#[tauri::command] +pub fn get_floating_note(manager: State) -> Result { + manager.get_content().map_err(|e| e.to_string()) +} + +#[tauri::command] +pub fn save_floating_note( + manager: State, + content: String, +) -> Result<(), String> { + manager.save_content(content).map_err(|e| e.to_string()) +} + +#[tauri::command] +pub async fn toggle_floating_notes_window(app: AppHandle) -> Result<(), String> { + if let Some(window) = app.get_webview_window("floating-notes") { + if window.is_visible().unwrap_or(false) { + window.hide().map_err(|e| e.to_string())?; + } else { + window.show().map_err(|e| e.to_string())?; + window.set_focus().map_err(|e| e.to_string())?; + } + } else { + // Create the window + let window = WebviewWindowBuilder::new( + &app, + "floating-notes", + WebviewUrl::App("/floating-notes".into()), + ) + .title("Floating Notes") + .inner_size(300.0, 400.0) + .min_inner_size(200.0, 200.0) + .always_on_top(true) + .decorations(false) // Frameless + .transparent(true) + .build() + .map_err(|e| e.to_string())?; + + // window.set_position(...) could be restored here if we stored it + } + Ok(()) +} diff --git a/src-tauri/src/hotkey_manager.rs b/src-tauri/src/hotkey_manager.rs new file mode 100644 index 00000000..b109b416 --- /dev/null +++ b/src-tauri/src/hotkey_manager.rs @@ -0,0 +1,529 @@ +use rusqlite::{params, Connection}; +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; +use std::sync::{Arc, Mutex}; +use tauri::{AppHandle, Emitter, Manager}; +use tauri_plugin_global_shortcut::{Code, GlobalShortcutExt, Modifiers, Shortcut, ShortcutState}; + +/// Hotkey configuration stored in database +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct HotkeyConfig { + pub command_id: String, + pub hotkey: String, // Display format: "Ctrl+Alt+←" + pub modifiers: u8, // Bitmask: 1=Ctrl, 2=Alt, 4=Shift, 8=Super + pub key: String, // Key code: "ArrowLeft", "KeyV", etc. +} + +/// Hotkey manager handles registration and persistence +pub struct HotkeyManager { + store: Arc>, + registered: Arc>>, +} + +impl HotkeyManager { + /// Create new hotkey manager and initialize database + pub fn new(app_handle: &AppHandle) -> Result { + let app_dir = app_handle + .path() + .app_data_dir() + .map_err(|e| format!("Failed to get app data dir: {}", e))?; + + std::fs::create_dir_all(&app_dir) + .map_err(|e| format!("Failed to create app data dir: {}", e))?; + + let db_path = app_dir.join("hotkeys.db"); + let store = Connection::open(&db_path) + .map_err(|e| format!("Failed to open hotkeys database: {}", e))?; + + // Create table if not exists + store + .execute( + "CREATE TABLE IF NOT EXISTS hotkeys ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + command_id TEXT NOT NULL UNIQUE, + hotkey TEXT NOT NULL, + modifiers INTEGER NOT NULL, + key TEXT NOT NULL, + created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP + )", + params![], + ) + .map_err(|e| format!("Failed to create hotkeys table: {}", e))?; + + store + .execute( + "CREATE UNIQUE INDEX IF NOT EXISTS idx_hotkeys_command ON hotkeys(command_id)", + params![], + ) + .map_err(|e| e.to_string())?; + + store + .execute( + "CREATE INDEX IF NOT EXISTS idx_hotkeys_lookup ON hotkeys(modifiers, key)", + params![], + ) + .map_err(|e| e.to_string())?; + + // Auto-initialize defaults on first run + let count: i64 = store + .query_row("SELECT COUNT(*) FROM hotkeys", [], |row| row.get(0)) + .map_err(|e| format!("Failed to count hotkeys: {}", e))?; + + if count == 0 { + tracing::info!("First run detected, initializing default hotkeys"); + let defaults = get_default_hotkeys(); + for config in defaults { + store + .execute( + "INSERT INTO hotkeys (command_id, hotkey, modifiers, key) + VALUES (?1, ?2, ?3, ?4)", + params![ + &config.command_id, + &config.hotkey, + config.modifiers, + &config.key + ], + ) + .map_err(|e| format!("Failed to insert default hotkey: {}", e))?; + } + tracing::info!( + "Initialized {} default hotkeys", + get_default_hotkeys().len() + ); + } else { + tracing::info!("Found {} existing hotkeys", count); + } + + tracing::info!("Hotkey manager initialized"); + + Ok(Self { + store: Arc::new(Mutex::new(store)), + registered: Arc::new(Mutex::new(HashMap::new())), + }) + } + + /// Load all hotkeys from database + pub fn get_all_hotkeys(&self) -> Result, String> { + let store = self.store.lock().expect("hotkey store mutex poisoned"); + + let mut stmt = store + .prepare("SELECT command_id, hotkey, modifiers, key FROM hotkeys ORDER BY command_id") + .map_err(|e| e.to_string())?; + + let hotkeys = stmt + .query_map(params![], |row| { + Ok(HotkeyConfig { + command_id: row.get(0)?, + hotkey: row.get(1)?, + modifiers: row.get(2)?, + key: row.get(3)?, + }) + }) + .map_err(|e| e.to_string())? + .collect::, _>>() + .map_err(|e| e.to_string())?; + + Ok(hotkeys) + } + + /// Save a hotkey configuration + pub fn save_hotkey(&self, config: &HotkeyConfig) -> Result<(), String> { + let store = self.store.lock().expect("hotkey store mutex poisoned"); + + store + .execute( + "INSERT OR REPLACE INTO hotkeys (command_id, hotkey, modifiers, key, updated_at) + VALUES (?1, ?2, ?3, ?4, CURRENT_TIMESTAMP)", + params![ + &config.command_id, + &config.hotkey, + config.modifiers, + &config.key + ], + ) + .map_err(|e| format!("Failed to save hotkey: {}", e))?; + + tracing::info!("Saved hotkey for {}: {}", config.command_id, config.hotkey); + Ok(()) + } + + /// Remove a hotkey configuration + pub fn remove_hotkey(&self, command_id: &str) -> Result<(), String> { + let store = self.store.lock().expect("hotkey store mutex poisoned"); + + store + .execute( + "DELETE FROM hotkeys WHERE command_id = ?1", + params![command_id], + ) + .map_err(|e| format!("Failed to remove hotkey: {}", e))?; + + tracing::info!("Removed hotkey for {}", command_id); + Ok(()) + } + + /// Check if a hotkey combination is already in use + pub fn detect_conflict(&self, modifiers: u8, key: &str) -> Result, String> { + let store = self.store.lock().expect("hotkey store mutex poisoned"); + + let mut stmt = store + .prepare("SELECT command_id FROM hotkeys WHERE modifiers = ?1 AND key = ?2") + .map_err(|e| e.to_string())?; + + let result = stmt.query_row(params![modifiers, key], |row| row.get::<_, String>(0)); + + match result { + Ok(command_id) => Ok(Some(command_id)), + Err(rusqlite::Error::QueryReturnedNoRows) => Ok(None), + Err(e) => Err(e.to_string()), + } + } + + /// Register a hotkey with Tauri + pub fn register_shortcut( + &self, + app: &AppHandle, + command_id: String, + shortcut: Shortcut, + ) -> Result<(), String> { + // Register the shortcut + app.global_shortcut() + .register(shortcut) + .map_err(|e| format!("Failed to register hotkey: {}", e))?; + + // Set up the handler + let command_id_clone = command_id.clone(); + app.global_shortcut() + .on_shortcut(shortcut, move |app, _, event| { + if event.state() == ShortcutState::Pressed { + tracing::debug!("Hotkey pressed for command: {}", command_id_clone); + // Emit event to execute command + let _ = app.emit_to( + tauri::EventTarget::labeled("main"), + "execute-command", + &command_id_clone, + ); + } + }) + .map_err(|e| format!("Failed to set hotkey handler: {}", e))?; + + // Track registered shortcut + let mut registered = self + .registered + .lock() + .expect("registered hotkeys mutex poisoned"); + registered.insert(command_id.clone(), shortcut); + + tracing::info!("Registered hotkey for command: {}", command_id); + Ok(()) + } + + /// Unregister a hotkey from Tauri + pub fn unregister_shortcut(&self, app: &AppHandle, command_id: &str) -> Result<(), String> { + let mut registered = self + .registered + .lock() + .expect("registered hotkeys mutex poisoned"); + + if let Some(shortcut) = registered.remove(command_id) { + app.global_shortcut() + .unregister(shortcut) + .map_err(|e| format!("Failed to unregister hotkey: {}", e))?; + + tracing::info!("Unregistered hotkey for command: {}", command_id); + } + + Ok(()) + } + + /// Get the command ID for a registered shortcut + #[allow(dead_code)] + pub fn get_command_for_shortcut(&self, shortcut: &Shortcut) -> Option { + let registered = self + .registered + .lock() + .expect("registered hotkeys mutex poisoned"); + registered + .iter() + .find(|(_, s)| *s == shortcut) + .map(|(cmd, _)| cmd.clone()) + } +} + +/// Convert modifiers bitmask to Tauri Modifiers +pub fn modifiers_from_bits(bits: u8) -> Option { + let mut mods = Modifiers::empty(); + + if bits & 1 != 0 { + mods |= Modifiers::CONTROL; + } + if bits & 2 != 0 { + mods |= Modifiers::ALT; + } + if bits & 4 != 0 { + mods |= Modifiers::SHIFT; + } + if bits & 8 != 0 { + mods |= Modifiers::SUPER; + } + + if mods.is_empty() { + None + } else { + Some(mods) + } +} + +/// Convert Tauri Modifiers to bitmask +#[allow(dead_code)] +pub fn modifiers_to_bits(mods: Modifiers) -> u8 { + let mut bits = 0u8; + + if mods.contains(Modifiers::CONTROL) { + bits |= 1; + } + if mods.contains(Modifiers::ALT) { + bits |= 2; + } + if mods.contains(Modifiers::SHIFT) { + bits |= 4; + } + if mods.contains(Modifiers::SUPER) { + bits |= 8; + } + + bits +} + +/// Convert string to Code (key code) +pub fn string_to_code(key: &str) -> Option { + match key { + "ArrowLeft" => Some(Code::ArrowLeft), + "ArrowRight" => Some(Code::ArrowRight), + "ArrowUp" => Some(Code::ArrowUp), + "ArrowDown" => Some(Code::ArrowDown), + "Space" => Some(Code::Space), + "Enter" => Some(Code::Enter), + "Escape" => Some(Code::Escape), + "Backspace" => Some(Code::Backspace), + "Tab" => Some(Code::Tab), + + // Letters + s if s.starts_with("Key") && s.len() == 4 => { + let letter = s.chars().nth(3)?; + match letter { + 'A' => Some(Code::KeyA), + 'B' => Some(Code::KeyB), + 'C' => Some(Code::KeyC), + 'D' => Some(Code::KeyD), + 'E' => Some(Code::KeyE), + 'F' => Some(Code::KeyF), + 'G' => Some(Code::KeyG), + 'H' => Some(Code::KeyH), + 'I' => Some(Code::KeyI), + 'J' => Some(Code::KeyJ), + 'K' => Some(Code::KeyK), + 'L' => Some(Code::KeyL), + 'M' => Some(Code::KeyM), + 'N' => Some(Code::KeyN), + 'O' => Some(Code::KeyO), + 'P' => Some(Code::KeyP), + 'Q' => Some(Code::KeyQ), + 'R' => Some(Code::KeyR), + 'S' => Some(Code::KeyS), + 'T' => Some(Code::KeyT), + 'U' => Some(Code::KeyU), + 'V' => Some(Code::KeyV), + 'W' => Some(Code::KeyW), + 'X' => Some(Code::KeyX), + 'Y' => Some(Code::KeyY), + 'Z' => Some(Code::KeyZ), + _ => None, + } + } + + // Numbers + "Digit0" => Some(Code::Digit0), + "Digit1" => Some(Code::Digit1), + "Digit2" => Some(Code::Digit2), + "Digit3" => Some(Code::Digit3), + "Digit4" => Some(Code::Digit4), + "Digit5" => Some(Code::Digit5), + "Digit6" => Some(Code::Digit6), + "Digit7" => Some(Code::Digit7), + "Digit8" => Some(Code::Digit8), + "Digit9" => Some(Code::Digit9), + + // Symbols + "Minus" => Some(Code::Minus), + "Equal" => Some(Code::Equal), + "BracketLeft" => Some(Code::BracketLeft), + "BracketRight" => Some(Code::BracketRight), + "Backslash" => Some(Code::Backslash), + "Semicolon" => Some(Code::Semicolon), + "Quote" => Some(Code::Quote), + "Comma" => Some(Code::Comma), + "Period" => Some(Code::Period), + "Slash" => Some(Code::Slash), + + _ => None, + } +} + +/// Format modifiers and key as display string +pub fn format_hotkey(modifiers: u8, key: &str) -> String { + let mut parts = Vec::new(); + + if modifiers & 8 != 0 { + parts.push("Super"); + } + if modifiers & 1 != 0 { + parts.push("Ctrl"); + } + if modifiers & 2 != 0 { + parts.push("Alt"); + } + if modifiers & 4 != 0 { + parts.push("Shift"); + } + + // Format key + let key_display = match key { + "ArrowLeft" => "←", + "ArrowRight" => "→", + "ArrowUp" => "↑", + "ArrowDown" => "↓", + s if s.starts_with("Key") => &s[3..], // "KeyV" -> "V" + s if s.starts_with("Digit") => &s[5..], // "Digit5" -> "5" + s => s, + }; + + parts.push(key_display); + parts.join("+") +} + +/// Get default hotkey configurations +pub fn get_default_hotkeys() -> Vec { + vec![ + // System Commands + HotkeyConfig { + command_id: "builtin:lock-screen".to_string(), + hotkey: "Ctrl+Alt+L".to_string(), + modifiers: 1 | 2, + key: "KeyL".to_string(), + }, + // Built-in Features + HotkeyConfig { + command_id: "builtin:history".to_string(), + hotkey: "Ctrl+Shift+V".to_string(), + modifiers: 1 | 4, // Ctrl + Shift + key: "KeyV".to_string(), + }, + HotkeyConfig { + command_id: "builtin:search-snippets".to_string(), + hotkey: "Ctrl+Shift+S".to_string(), + modifiers: 1 | 4, + key: "KeyS".to_string(), + }, + ] +} + +// Tauri commands + +#[tauri::command] +pub async fn get_hotkey_config(app: AppHandle) -> Result, String> { + let manager = app.state::(); + manager.get_all_hotkeys() +} + +#[tauri::command] +pub async fn set_command_hotkey( + app: AppHandle, + command_id: String, + modifiers: u8, + key: String, +) -> Result<(), String> { + let manager = app.state::(); + + // Check for conflicts + if let Some(conflict) = manager.detect_conflict(modifiers, &key)? { + if conflict != command_id { + return Err(format!("Hotkey already assigned to: {}", conflict)); + } + } + + // Create config + let hotkey_display = format_hotkey(modifiers, &key); + let config = HotkeyConfig { + command_id: command_id.clone(), + hotkey: hotkey_display, + modifiers, + key: key.clone(), + }; + + // Save to database + manager.save_hotkey(&config)?; + + // Unregister old shortcut if exists + let _ = manager.unregister_shortcut(&app, &command_id); + + // Register new shortcut + let mods = modifiers_from_bits(modifiers).ok_or("Invalid modifiers")?; + let code = string_to_code(&key).ok_or("Invalid key code")?; + let shortcut = Shortcut::new(Some(mods), code); + + manager.register_shortcut(&app, command_id, shortcut)?; + + Ok(()) +} + +#[tauri::command] +pub async fn remove_command_hotkey(app: AppHandle, command_id: String) -> Result<(), String> { + let manager = app.state::(); + + // Unregister from Tauri + manager.unregister_shortcut(&app, &command_id)?; + + // Remove from database + manager.remove_hotkey(&command_id)?; + + Ok(()) +} + +#[tauri::command] +pub async fn check_hotkey_conflict( + app: AppHandle, + modifiers: u8, + key: String, +) -> Result, String> { + let manager = app.state::(); + manager.detect_conflict(modifiers, &key) +} + +#[tauri::command] +pub async fn reset_hotkeys_to_defaults(app: AppHandle) -> Result<(), String> { + let manager = app.state::(); + + // Get all current hotkeys and unregister them + let current = manager.get_all_hotkeys()?; + for config in current { + let _ = manager.unregister_shortcut(&app, &config.command_id); + let _ = manager.remove_hotkey(&config.command_id); + } + + // Apply defaults + let defaults = get_default_hotkeys(); + for config in defaults { + manager.save_hotkey(&config)?; + + let mods = modifiers_from_bits(config.modifiers).ok_or("Invalid modifiers")?; + let code = string_to_code(&config.key).ok_or("Invalid key code")?; + let shortcut = Shortcut::new(Some(mods), code); + + let _ = manager.register_shortcut(&app, config.command_id, shortcut); + } + + Ok(()) +} diff --git a/src-tauri/src/integrations/github/auth.rs b/src-tauri/src/integrations/github/auth.rs new file mode 100644 index 00000000..032313ea --- /dev/null +++ b/src-tauri/src/integrations/github/auth.rs @@ -0,0 +1,157 @@ +use serde::{Deserialize, Serialize}; +use std::time::Duration; + +const GITHUB_CLIENT_ID: &str = "Ov23liLBXQcwvZPYjDGh"; // Flareup GitHub OAuth App +const DEVICE_CODE_URL: &str = "https://github.com/login/device/code"; +const ACCESS_TOKEN_URL: &str = "https://github.com/login/oauth/access_token"; +#[allow(dead_code)] +const POLL_INTERVAL: Duration = Duration::from_secs(5); + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct DeviceCodeResponse { + pub device_code: String, + pub user_code: String, + pub verification_uri: String, + pub expires_in: u64, + pub interval: u64, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(untagged)] +pub enum TokenResponse { + Success { + access_token: String, + token_type: String, + scope: String, + }, + Pending { + error: String, + error_description: String, + }, +} + +/// Start the OAuth device flow by requesting a device code +pub async fn start_device_flow() -> Result { + let client = reqwest::Client::new(); + + let params = [ + ("client_id", GITHUB_CLIENT_ID), + ("scope", "repo user notifications"), + ]; + + let response = client + .post(DEVICE_CODE_URL) + .header("Accept", "application/json") + .form(¶ms) + .send() + .await + .map_err(|e| format!("Failed to request device code: {}", e))?; + + if !response.status().is_success() { + return Err(format!("GitHub API error: {}", response.status())); + } + + let device_code: DeviceCodeResponse = response + .json() + .await + .map_err(|e| format!("Failed to parse device code response: {}", e))?; + + Ok(device_code) +} + +/// Poll for the access token using the device code +pub async fn poll_for_token(device_code: &str) -> Result, String> { + let client = reqwest::Client::new(); + + let params = [ + ("client_id", GITHUB_CLIENT_ID), + ("device_code", device_code), + ("grant_type", "urn:ietf:params:oauth:grant-type:device_code"), + ]; + + let response = client + .post(ACCESS_TOKEN_URL) + .header("Accept", "application/json") + .form(¶ms) + .send() + .await + .map_err(|e| format!("Failed to poll for token: {}", e))?; + + if !response.status().is_success() { + return Err(format!("GitHub API error: {}", response.status())); + } + + let token_response: TokenResponse = response + .json() + .await + .map_err(|e| format!("Failed to parse token response: {}", e))?; + + match token_response { + TokenResponse::Success { access_token, .. } => Ok(Some(access_token)), + TokenResponse::Pending { error, .. } => { + if error == "authorization_pending" || error == "slow_down" { + Ok(None) // Still waiting for user authorization + } else if error == "expired_token" { + Err( + "Device code expired. Please start the authentication process again." + .to_string(), + ) + } else if error == "access_denied" { + Err("User denied authorization.".to_string()) + } else { + Err(format!("Authentication error: {}", error)) + } + } + } +} + +/// Store the GitHub access token in the keyring +pub fn store_token(token: &str) -> Result<(), String> { + let entry = keyring::Entry::new("flareup", "github") + .map_err(|e| format!("Failed to create keyring entry: {}", e))?; + + entry + .set_password(token) + .map_err(|e| format!("Failed to store token: {}", e))?; + + Ok(()) +} + +/// Retrieve the GitHub access token from the keyring +pub fn get_token() -> Result, String> { + let entry = keyring::Entry::new("flareup", "github") + .map_err(|e| format!("Failed to create keyring entry: {}", e))?; + + match entry.get_password() { + Ok(token) => Ok(Some(token)), + Err(keyring::Error::NoEntry) => Ok(None), + Err(e) => Err(format!("Failed to retrieve token: {}", e)), + } +} + +/// Delete the GitHub access token from the keyring +pub fn delete_token() -> Result<(), String> { + let entry = keyring::Entry::new("flareup", "github") + .map_err(|e| format!("Failed to create keyring entry: {}", e))?; + + match entry.delete_credential() { + Ok(()) => Ok(()), + Err(keyring::Error::NoEntry) => Ok(()), // Already deleted + Err(e) => Err(format!("Failed to delete token: {}", e)), + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_device_code_url() { + assert_eq!(DEVICE_CODE_URL, "https://github.com/login/device/code"); + } + + #[test] + fn test_client_id() { + assert!(!GITHUB_CLIENT_ID.is_empty()); + } +} diff --git a/src-tauri/src/integrations/github/issues.rs b/src-tauri/src/integrations/github/issues.rs new file mode 100644 index 00000000..337a178d --- /dev/null +++ b/src-tauri/src/integrations/github/issues.rs @@ -0,0 +1,182 @@ +use super::{types::*, GitHubClient}; + +impl GitHubClient { + /// List issues for a repository + pub async fn list_issues( + &self, + owner: &str, + repo: &str, + state: Option<&str>, + ) -> Result, String> { + let mut path = format!("/repos/{}/{}/issues", owner, repo); + + if let Some(state) = state { + path.push_str(&format!("?state={}", state)); + } + + let response = self + .build_request(reqwest::Method::GET, &path) + .send() + .await + .map_err(|e| format!("Failed to list issues: {}", e))?; + + if !response.status().is_success() { + return Err(format!("GitHub API error: {}", response.status())); + } + + response + .json() + .await + .map_err(|e| format!("Failed to parse issues response: {}", e)) + } + + /// Get a specific issue + pub async fn get_issue(&self, owner: &str, repo: &str, number: u64) -> Result { + let path = format!("/repos/{}/{}/issues/{}", owner, repo, number); + + let response = self + .build_request(reqwest::Method::GET, &path) + .send() + .await + .map_err(|e| format!("Failed to get issue: {}", e))?; + + if !response.status().is_success() { + return Err(format!("GitHub API error: {}", response.status())); + } + + response + .json() + .await + .map_err(|e| format!("Failed to parse issue response: {}", e)) + } + + /// Create a new issue + pub async fn create_issue( + &self, + owner: &str, + repo: &str, + title: String, + body: Option, + labels: Option>, + assignees: Option>, + ) -> Result { + let path = format!("/repos/{}/{}/issues", owner, repo); + + let mut payload = serde_json::json!({ + "title": title, + }); + + if let Some(body) = body { + payload["body"] = serde_json::json!(body); + } + + if let Some(labels) = labels { + payload["labels"] = serde_json::json!(labels); + } + + if let Some(assignees) = assignees { + payload["assignees"] = serde_json::json!(assignees); + } + + let response = self + .build_request(reqwest::Method::POST, &path) + .json(&payload) + .send() + .await + .map_err(|e| format!("Failed to create issue: {}", e))?; + + if !response.status().is_success() { + let status = response.status(); + let error_text = response.text().await.unwrap_or_default(); + return Err(format!("GitHub API error {}: {}", status, error_text)); + } + + response + .json() + .await + .map_err(|e| format!("Failed to parse created issue response: {}", e)) + } + + /// Update an existing issue + pub async fn update_issue( + &self, + owner: &str, + repo: &str, + number: u64, + title: Option, + body: Option, + state: Option<&str>, + labels: Option>, + assignees: Option>, + ) -> Result { + let path = format!("/repos/{}/{}/issues/{}", owner, repo, number); + + let mut payload = serde_json::json!({}); + + if let Some(title) = title { + payload["title"] = serde_json::json!(title); + } + + if let Some(body) = body { + payload["body"] = serde_json::json!(body); + } + + if let Some(state) = state { + payload["state"] = serde_json::json!(state); + } + + if let Some(labels) = labels { + payload["labels"] = serde_json::json!(labels); + } + + if let Some(assignees) = assignees { + payload["assignees"] = serde_json::json!(assignees); + } + + let response = self + .build_request(reqwest::Method::PATCH, &path) + .json(&payload) + .send() + .await + .map_err(|e| format!("Failed to update issue: {}", e))?; + + if !response.status().is_success() { + return Err(format!("GitHub API error: {}", response.status())); + } + + response + .json() + .await + .map_err(|e| format!("Failed to parse updated issue response: {}", e)) + } + + /// Close an issue + pub async fn close_issue(&self, owner: &str, repo: &str, number: u64) -> Result { + self.update_issue(owner, repo, number, None, None, Some("closed"), None, None) + .await + } + + /// List issues assigned to the authenticated user + pub async fn list_my_issues(&self, state: Option<&str>) -> Result, String> { + let mut path = "/issues".to_string(); + + if let Some(state) = state { + path.push_str(&format!("?state={}", state)); + } + + let response = self + .build_request(reqwest::Method::GET, &path) + .send() + .await + .map_err(|e| format!("Failed to list my issues: {}", e))?; + + if !response.status().is_success() { + return Err(format!("GitHub API error: {}", response.status())); + } + + response + .json() + .await + .map_err(|e| format!("Failed to parse my issues response: {}", e)) + } +} diff --git a/src-tauri/src/integrations/github/mod.rs b/src-tauri/src/integrations/github/mod.rs new file mode 100644 index 00000000..e6fd45af --- /dev/null +++ b/src-tauri/src/integrations/github/mod.rs @@ -0,0 +1,63 @@ +pub mod auth; +pub mod issues; +pub mod search; +pub mod types; + +pub use auth::{ + delete_token, get_token, poll_for_token, start_device_flow, store_token, DeviceCodeResponse, +}; +pub use types::*; + +use reqwest::Client; + +const GITHUB_API_BASE: &str = "https://api.github.com"; +const GITHUB_API_VERSION: &str = "2022-11-28"; + +pub struct GitHubClient { + token: String, + http_client: Client, +} + +impl GitHubClient { + pub fn new(token: String) -> Self { + Self { + token, + http_client: Client::new(), + } + } + + /// Create a new client from stored token + pub fn from_stored_token() -> Result { + let token = get_token()?.ok_or("No GitHub token found. Please authenticate first.")?; + Ok(Self::new(token)) + } + + /// Helper to build authenticated requests + fn build_request(&self, method: reqwest::Method, path: &str) -> reqwest::RequestBuilder { + let url = format!("{}{}", GITHUB_API_BASE, path); + self.http_client + .request(method, &url) + .header("Authorization", format!("Bearer {}", self.token)) + .header("Accept", "application/vnd.github+json") + .header("X-GitHub-Api-Version", GITHUB_API_VERSION) + .header("User-Agent", "Flareup") + } + + /// Test the authentication by getting the current user + pub async fn get_current_user(&self) -> Result { + let response = self + .build_request(reqwest::Method::GET, "/user") + .send() + .await + .map_err(|e| format!("Failed to get current user: {}", e))?; + + if !response.status().is_success() { + return Err(format!("GitHub API error: {}", response.status())); + } + + response + .json() + .await + .map_err(|e| format!("Failed to parse user response: {}", e)) + } +} diff --git a/src-tauri/src/integrations/github/search.rs b/src-tauri/src/integrations/github/search.rs new file mode 100644 index 00000000..470c96fe --- /dev/null +++ b/src-tauri/src/integrations/github/search.rs @@ -0,0 +1,83 @@ +use super::{types::*, GitHubClient}; + +impl GitHubClient { + /// Search for issues and pull requests + pub async fn search_issues(&self, query: &str) -> Result, String> { + let path = format!("/search/issues?q={}", urlencoding::encode(query)); + + let response = self + .build_request(reqwest::Method::GET, &path) + .send() + .await + .map_err(|e| format!("Failed to search issues: {}", e))?; + + if !response.status().is_success() { + return Err(format!("GitHub API error: {}", response.status())); + } + + response + .json() + .await + .map_err(|e| format!("Failed to parse search results: {}", e)) + } + + /// Search for repositories + pub async fn search_repos(&self, query: &str) -> Result, String> { + let path = format!("/search/repositories?q={}", urlencoding::encode(query)); + + let response = self + .build_request(reqwest::Method::GET, &path) + .send() + .await + .map_err(|e| format!("Failed to search repositories: {}", e))?; + + if !response.status().is_success() { + return Err(format!("GitHub API error: {}", response.status())); + } + + response + .json() + .await + .map_err(|e| format!("Failed to parse search results: {}", e)) + } + + /// List repositories for the authenticated user + pub async fn list_user_repos(&self) -> Result, String> { + let path = "/user/repos?per_page=100&sort=updated"; + + let response = self + .build_request(reqwest::Method::GET, path) + .send() + .await + .map_err(|e| format!("Failed to list repositories: {}", e))?; + + if !response.status().is_success() { + return Err(format!("GitHub API error: {}", response.status())); + } + + response + .json() + .await + .map_err(|e| format!("Failed to parse repositories response: {}", e)) + } + + /// Get a specific repository + pub async fn get_repo(&self, owner: &str, repo: &str) -> Result { + let path = format!("/repos/{}/{}", owner, repo); + + let response = self + .build_request(reqwest::Method::GET, &path) + .send() + .await + .map_err(|e| format!("Failed to get repository: {}", e))?; + + if !response.status().is_success() { + return Err(format!("GitHub API error: {}", response.status())); + } + + response + .json() + .await + .map_err(|e| format!("Failed to parse repository response: {}", e)) + } +} diff --git a/src-tauri/src/integrations/github/types.rs b/src-tauri/src/integrations/github/types.rs new file mode 100644 index 00000000..511689fa --- /dev/null +++ b/src-tauri/src/integrations/github/types.rs @@ -0,0 +1,122 @@ +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct User { + pub id: u64, + pub login: String, + pub avatar_url: String, + pub html_url: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Label { + pub id: u64, + pub name: String, + pub color: String, + pub description: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum IssueState { + Open, + Closed, +} + +impl IssueState { + #[allow(dead_code)] + pub fn as_str(&self) -> &str { + match self { + IssueState::Open => "open", + IssueState::Closed => "closed", + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Issue { + pub id: u64, + pub number: u64, + pub title: String, + pub body: Option, + pub state: IssueState, + pub user: User, + pub labels: Vec